From a10889c56410d56f8af5b545f378da4937f5b52e Mon Sep 17 00:00:00 2001 From: Andrew Cobb Date: Sat, 16 Nov 2024 12:31:03 -0700 Subject: [PATCH 1/5] Fix wmemcheck for swift-wasm usage --- crates/cranelift/src/func_environ.rs | 162 ++++++++++++++++++--- crates/environ/src/builtin.rs | 22 ++- crates/wasmtime/src/runtime/vm/libcalls.rs | 71 +++++++-- crates/wmemcheck/src/lib.rs | 27 +++- 4 files changed, 243 insertions(+), 39 deletions(-) diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 939ef6fde9e6..545aaab11974 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -585,6 +585,112 @@ impl<'module_environment> FuncEnvironment<'module_environment> { builder.ins().call(check_malloc, &[vmctx, retval, len]); } + #[cfg(feature = "wmemcheck")] + fn hook_calloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { + let check_calloc = self.builtin_functions.check_calloc(builder.func); + let vmctx = self.vmctx_val(&mut builder.cursor()); + let func_args = builder + .func + .dfg + .block_params(builder.func.layout.entry_block().unwrap()); + let (count, size) = if func_args.len() < 4 { + return; + } else { + (func_args[2], func_args[3]) + }; + let retval = if retvals.len() < 1 { + return; + } else { + retvals[0] + }; + builder.ins().call(check_calloc, &[vmctx, retval, count, size]); + } + + #[cfg(feature = "wmemcheck")] + fn hook_realloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { + let check_realloc = self.builtin_functions.check_realloc(builder.func); + let vmctx = self.vmctx_val(&mut builder.cursor()); + let func_args = builder + .func + .dfg + .block_params(builder.func.layout.entry_block().unwrap()); + let (ptr, len) = if func_args.len() < 4 { + return; + } else { + // If a function named `realloc` has at least two arguments, we assume the + // first arguments are the pointer and requested allocation size. + (func_args[2], func_args[3]) + }; + let retval = if retvals.len() < 1 { + return; + } else { + retvals[0] + }; + builder.ins().call(check_realloc, &[vmctx, retval, ptr, len]); + } + + #[cfg(feature = "wmemcheck")] + fn hook_malloc_usable_size_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { + let check_malloc_usable_size = self.builtin_functions.check_malloc_usable_size(builder.func); + let vmctx = self.vmctx_val(&mut builder.cursor()); + let func_args = builder + .func + .dfg + .block_params(builder.func.layout.entry_block().unwrap()); + let ptr = if func_args.len() < 3 { + return; + } else { + func_args[2] + }; + let retval = if retvals.len() < 1 { + return; + } else { + retvals[0] + }; + builder.ins().call(check_malloc_usable_size, &[vmctx, retval, ptr]); + } + + #[cfg(feature = "wmemcheck")] + fn hook_posix_memalign_exit(&mut self, builder: &mut FunctionBuilder) { + let check_posix_memalign = self.builtin_functions.check_posix_memalign(builder.func); + let vmctx = self.vmctx_val(&mut builder.cursor()); + let func_args = builder + .func + .dfg + .block_params(builder.func.layout.entry_block().unwrap()); + let (outptr, _alignment, size) = if func_args.len() < 5 { + return; + } else { + // If a function named `malloc` has at least one argument, we assume the + // first argument is the requested allocation size. 
+ (func_args[2], func_args[3], func_args[4]) + }; + builder.ins().call(check_posix_memalign, &[vmctx, outptr, size]); + } + + #[cfg(feature = "wmemcheck")] + fn hook_aligned_alloc(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { + let check_malloc = self.builtin_functions.check_malloc(builder.func); + let vmctx = self.vmctx_val(&mut builder.cursor()); + let func_args = builder + .func + .dfg + .block_params(builder.func.layout.entry_block().unwrap()); + let (_alignment, size) = if func_args.len() < 4 { + return; + } else { + // If a function named `malloc` has at least one argument, we assume the + // first argument is the requested allocation size. + (func_args[2], func_args[3]) + }; + let retval = if retvals.len() < 1 { + return; + } else { + retvals[0] + }; + builder.ins().call(check_malloc, &[vmctx, retval, size]); + } + #[cfg(feature = "wmemcheck")] fn hook_free_exit(&mut self, builder: &mut FunctionBuilder) { let check_free = self.builtin_functions.check_free(builder.func); @@ -927,17 +1033,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { } #[cfg(feature = "wmemcheck")] - fn check_malloc_start(&mut self, builder: &mut FunctionBuilder) { - let malloc_start = self.builtin_functions.malloc_start(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - builder.ins().call(malloc_start, &[vmctx]); - } - - #[cfg(feature = "wmemcheck")] - fn check_free_start(&mut self, builder: &mut FunctionBuilder) { - let free_start = self.builtin_functions.free_start(builder.func); + fn hook_memcheck_off(&mut self, builder: &mut FunctionBuilder) { + let memcheck_off = self.builtin_functions.memcheck_off(builder.func); let vmctx = self.vmctx_val(&mut builder.cursor()); - builder.ins().call(free_start, &[vmctx]); + builder.ins().call(memcheck_off, &[vmctx]); } #[cfg(feature = "wmemcheck")] @@ -3088,11 +3187,22 @@ impl<'module_environment> crate::translate::FuncEnvironment #[cfg(feature = "wmemcheck")] if self.wmemcheck { - let func_name = self.current_func_name(builder); - if func_name == Some("malloc") { - self.check_malloc_start(builder); - } else if func_name == Some("free") { - self.check_free_start(builder); + match self.current_func_name(builder) { + Some("__wrap_malloc") | Some("malloc") => + self.hook_memcheck_off(builder), + Some("__wrap_calloc") | Some("calloc") => + self.hook_memcheck_off(builder), + Some("__wrap_realloc") | Some("realloc") => + self.hook_memcheck_off(builder), + Some("__wrap_malloc_usable_size") | Some("malloc_usable_size") => + self.hook_memcheck_off(builder), + Some("__wrap_posix_memalign") | Some("posix_memalign") => + self.hook_memcheck_off(builder), + Some("__wrap_aligned_alloc") | Some("aligned_alloc") => + self.hook_memcheck_off(builder), + Some("__wrap_free") | Some("free") => + self.hook_memcheck_off(builder), + _ => () } } @@ -3141,11 +3251,23 @@ impl<'module_environment> crate::translate::FuncEnvironment #[cfg(feature = "wmemcheck")] fn handle_before_return(&mut self, retvals: &[ir::Value], builder: &mut FunctionBuilder) { if self.wmemcheck { - let func_name = self.current_func_name(builder); - if func_name == Some("malloc") { - self.hook_malloc_exit(builder, retvals); - } else if func_name == Some("free") { - self.hook_free_exit(builder); + let name = self.current_func_name(builder); + match name { + Some("__wrap_malloc") | Some("malloc") => + self.hook_malloc_exit(builder, retvals), + Some("__wrap_calloc") | Some("calloc") => + self.hook_calloc_exit(builder, retvals), + Some("__wrap_realloc") | Some("realloc") => 
+ self.hook_realloc_exit(builder, retvals), + Some("__wrap_malloc_usable_size") | Some("malloc_usable_size") => + self.hook_malloc_usable_size_exit(builder, retvals), + Some("__wrap_posix_memalign") | Some("posix_memalign") => + self.hook_posix_memalign_exit(builder), + Some("__wrap_aligned_alloc") | Some("aligned_alloc") => + self.hook_aligned_alloc(builder, retvals), + Some("__wrap_free") | Some("free") => + self.hook_free_exit(builder), + _ => () } } } diff --git a/crates/environ/src/builtin.rs b/crates/environ/src/builtin.rs index 090db85bee84..dd1bd38dc4ae 100644 --- a/crates/environ/src/builtin.rs +++ b/crates/environ/src/builtin.rs @@ -44,6 +44,18 @@ macro_rules! foreach_builtin_function { // Invoked before malloc returns. #[cfg(feature = "wmemcheck")] check_malloc(vmctx: vmctx, addr: i32, len: i32) -> i32; + // Invoked before calloc returns. + #[cfg(feature = "wmemcheck")] + check_calloc(vmctx: vmctx, addr: i32, count: i32, size: i32) -> i32; + // Invoked before realloc returns. + #[cfg(feature = "wmemcheck")] + check_realloc(vmctx: vmctx, end_addr: i32, start_addr: i32, len: i32) -> i32; + // Invoked before malloc_usable_size returns. + #[cfg(feature = "wmemcheck")] + check_malloc_usable_size(vmctx: vmctx, len: i32, addr: i32) -> i32; + // Invoked before posix_memalign returns. + #[cfg(feature = "wmemcheck")] + check_posix_memalign(vmctx: vmctx, outptr: i32, size: i32) -> i32; // Invoked before the free returns. #[cfg(feature = "wmemcheck")] check_free(vmctx: vmctx, addr: i32) -> i32; @@ -53,12 +65,6 @@ macro_rules! foreach_builtin_function { // Invoked before a store is executed. #[cfg(feature = "wmemcheck")] check_store(vmctx: vmctx, num_bytes: i32, addr: i32, offset: i32) -> i32; - // Invoked after malloc is called. - #[cfg(feature = "wmemcheck")] - malloc_start(vmctx: vmctx); - // Invoked after free is called. - #[cfg(feature = "wmemcheck")] - free_start(vmctx: vmctx); // Invoked when wasm stack pointer is updated. #[cfg(feature = "wmemcheck")] update_stack_pointer(vmctx: vmctx, value: i32); @@ -66,6 +72,10 @@ macro_rules! foreach_builtin_function { #[cfg(feature = "wmemcheck")] update_mem_size(vmctx: vmctx, num_bytes: i32); + // Invoked before stuff is called. + #[cfg(feature = "wmemcheck")] + memcheck_off(vmctx: vmctx); + // Drop a non-stack GC reference (eg an overwritten table entry) // once it will no longer be used again. (Note: `val` is not of type // `reference` because it needn't appear in any stack maps, as it diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index f91426d2970f..252c2e6f5d7b 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -1137,6 +1137,40 @@ unsafe fn check_malloc( Ok(0) } +// Hook for validating calloc using wmemcheck_state. +#[cfg(feature = "wmemcheck")] +unsafe fn check_calloc(store: &mut dyn VMStore, instance: &mut Instance, addr: u32, count: u32, size: u32) -> Result { + check_malloc(store, instance, addr, count * size) +} + +// Hook for validating realloc using wmemcheck_state. +#[cfg(feature = "wmemcheck")] +unsafe fn check_realloc(store: &mut dyn VMStore, instance: &mut Instance, end_addr: u32, start_addr: u32, len: u32) -> Result { + check_free(store, instance, start_addr)?; + check_malloc(store, instance, end_addr, len) +} + +// Hook for validating malloc_usable_size using wmemcheck_state. 
+#[cfg(feature = "wmemcheck")] +unsafe fn check_malloc_usable_size(store: &mut dyn VMStore, instance: &mut Instance, len: u32, addr: u32) -> Result { + check_free(store, instance, addr)?; + check_malloc(store, instance, addr, len) +} + + +// Hook for validating posix_memalign using wmemcheck_state. +#[cfg(feature = "wmemcheck")] +unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, outptr: u32, size: u32) -> Result { + for (_, entry) in instance.exports() { + if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { + let mem = instance.get_memory(*mem_index); + let out = *(mem.base.offset(outptr as isize) as *mut u32); + return check_malloc(store, instance, out, size) + } + } + todo!("Why is there no memory?") +} + // Hook for validating free using wmemcheck_state. #[cfg(feature = "wmemcheck")] unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u32) -> Result { @@ -1158,6 +1192,30 @@ unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u3 Ok(0) } +#[cfg(feature = "wmemcheck")] +fn log_allocation_previous_to(instance: &mut Instance, addr: usize) { + if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { + if let Some((prev_malloc, prev_len)) = wmemcheck_state.malloc_previous_to(addr) { + println!("previous malloc'd range was {:#x}..{:#x}", prev_malloc, prev_malloc + prev_len); + for (_, entry) in instance.exports() { + if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { + let mem = instance.get_memory(*mem_index); + for i in 0..prev_len { + if i > 0 && i % 40 == 0 { + println!(); + } + unsafe { + print!("{:02x} ", *mem.base.offset((prev_malloc + i) as isize)); + } + } + println!(); + break + } + } + } + } +} + // Hook for validating load using wmemcheck_state. #[cfg(feature = "wmemcheck")] fn check_load( @@ -1174,6 +1232,7 @@ fn check_load( return Ok(0); } Err(InvalidRead { addr, len }) => { + log_allocation_previous_to(instance, addr); bail!("Invalid load at addr {:#x} of size {}", addr, len); } Err(OutOfBounds { addr, len }) => { @@ -1203,6 +1262,7 @@ fn check_store( return Ok(0); } Err(InvalidWrite { addr, len }) => { + log_allocation_previous_to(instance, addr); bail!("Invalid store at addr {:#x} of size {}", addr, len) } Err(OutOfBounds { addr, len }) => { @@ -1216,17 +1276,8 @@ fn check_store( Ok(0) } -// Hook for turning wmemcheck load/store validation off when entering a malloc function. -#[cfg(feature = "wmemcheck")] -fn malloc_start(_store: &mut dyn VMStore, instance: &mut Instance) { - if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { - wmemcheck_state.memcheck_off(); - } -} - -// Hook for turning wmemcheck load/store validation off when entering a free function. 
#[cfg(feature = "wmemcheck")] -fn free_start(_store: &mut dyn VMStore, instance: &mut Instance) { +fn memcheck_off(_store: &mut dyn VMStore, instance: &mut Instance) { if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { wmemcheck_state.memcheck_off(); } diff --git a/crates/wmemcheck/src/lib.rs b/crates/wmemcheck/src/lib.rs index 742f090b68af..72ce6aa0adca 100644 --- a/crates/wmemcheck/src/lib.rs +++ b/crates/wmemcheck/src/lib.rs @@ -10,6 +10,21 @@ pub struct Wmemcheck { pub flag: bool, } +impl Wmemcheck { + pub fn malloc_previous_to(&self, addr: usize) -> Option<(usize, usize)> { + let mut best: Option<(usize, usize)> = None; + for (base, len) in self.mallocs.iter() { + if let Some((prev_base, _)) = best { + if prev_base < *base && *base <= addr { + best = Some((*base, *len)); + } + } else { + best = Some((*base, *len)); + } + } + best + } +} /// Error types for memory checker. #[derive(Debug, PartialEq)] pub enum AccessError { @@ -51,7 +66,10 @@ impl Wmemcheck { } /// Updates memory checker memory state metadata when malloc is called. - pub fn malloc(&mut self, addr: usize, len: usize) -> Result<(), AccessError> { + pub fn malloc(&mut self, addr: usize, start_len: usize) -> Result<(), AccessError> { + // round up to multiple of 4 + let len = (start_len + 3) & !3; + if !self.is_in_bounds_heap(addr, len) { return Err(AccessError::OutOfBounds { addr: addr, @@ -101,12 +119,12 @@ impl Wmemcheck { len: len, }); } - MemState::ValidToWrite => { + /* MemState::ValidToWrite => { return Err(AccessError::InvalidRead { addr: addr, len: len, }); - } + } */ _ => {} } } @@ -140,6 +158,9 @@ impl Wmemcheck { /// Updates memory checker memory state metadata when free is called. pub fn free(&mut self, addr: usize) -> Result<(), AccessError> { + if addr == 0 { + return Ok(()); + } if !self.mallocs.contains_key(&addr) { return Err(AccessError::InvalidFree { addr: addr }); } From ac6abdcd50902e9966324e4439460a1101f0bc8e Mon Sep 17 00:00:00 2001 From: Andrew Cobb Date: Tue, 19 Nov 2024 17:47:41 -0700 Subject: [PATCH 2/5] cleanup --- crates/cranelift/src/func_environ.rs | 220 +++++---------------- crates/environ/src/builtin.rs | 22 ++- crates/wasmtime/src/runtime/vm/libcalls.rs | 81 +++----- 3 files changed, 95 insertions(+), 228 deletions(-) diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 545aaab11974..413eecf78be8 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -563,150 +563,26 @@ impl<'module_environment> FuncEnvironment<'module_environment> { } #[cfg(feature = "wmemcheck")] - fn hook_malloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { - let check_malloc = self.builtin_functions.check_malloc(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let len = if func_args.len() < 3 { - return; - } else { - // If a function named `malloc` has at least one argument, we assume the - // first argument is the requested allocation size. 
- func_args[2] - }; - let retval = if retvals.len() < 1 { - return; - } else { - retvals[0] - }; - builder.ins().call(check_malloc, &[vmctx, retval, len]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_calloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { - let check_calloc = self.builtin_functions.check_calloc(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let (count, size) = if func_args.len() < 4 { - return; - } else { - (func_args[2], func_args[3]) - }; - let retval = if retvals.len() < 1 { - return; - } else { - retvals[0] - }; - builder.ins().call(check_calloc, &[vmctx, retval, count, size]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_realloc_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { - let check_realloc = self.builtin_functions.check_realloc(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let (ptr, len) = if func_args.len() < 4 { - return; - } else { - // If a function named `realloc` has at least two arguments, we assume the - // first arguments are the pointer and requested allocation size. - (func_args[2], func_args[3]) - }; - let retval = if retvals.len() < 1 { - return; - } else { - retvals[0] - }; - builder.ins().call(check_realloc, &[vmctx, retval, ptr, len]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_malloc_usable_size_exit(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { - let check_malloc_usable_size = self.builtin_functions.check_malloc_usable_size(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let ptr = if func_args.len() < 3 { - return; - } else { - func_args[2] - }; - let retval = if retvals.len() < 1 { - return; - } else { - retvals[0] - }; - builder.ins().call(check_malloc_usable_size, &[vmctx, retval, ptr]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_posix_memalign_exit(&mut self, builder: &mut FunctionBuilder) { - let check_posix_memalign = self.builtin_functions.check_posix_memalign(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let (outptr, _alignment, size) = if func_args.len() < 5 { - return; - } else { - // If a function named `malloc` has at least one argument, we assume the - // first argument is the requested allocation size. - (func_args[2], func_args[3], func_args[4]) - }; - builder.ins().call(check_posix_memalign, &[vmctx, outptr, size]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_aligned_alloc(&mut self, builder: &mut FunctionBuilder, retvals: &[ir::Value]) { - let check_malloc = self.builtin_functions.check_malloc(builder.func); - let vmctx = self.vmctx_val(&mut builder.cursor()); - let func_args = builder - .func - .dfg - .block_params(builder.func.layout.entry_block().unwrap()); - let (_alignment, size) = if func_args.len() < 4 { - return; - } else { - // If a function named `malloc` has at least one argument, we assume the - // first argument is the requested allocation size. 
- (func_args[2], func_args[3]) - }; - let retval = if retvals.len() < 1 { - return; - } else { - retvals[0] - }; - builder.ins().call(check_malloc, &[vmctx, retval, size]); - } - - #[cfg(feature = "wmemcheck")] - fn hook_free_exit(&mut self, builder: &mut FunctionBuilder) { - let check_free = self.builtin_functions.check_free(builder.func); + fn hook_memcheck_exit( + &mut self, + builder: &mut FunctionBuilder, + builtin_memcheck_check: ir::FuncRef, + expected_arg_count: usize, + retvals: &[ir::Value], + ) { let vmctx = self.vmctx_val(&mut builder.cursor()); let func_args = builder .func .dfg .block_params(builder.func.layout.entry_block().unwrap()); - let ptr = if func_args.len() < 3 { + if func_args.len() < expected_arg_count + 2 { + // Assume the first n arguments are the expected ones return; - } else { - // If a function named `free` has at least one argument, we assume the - // first argument is a pointer to memory. - func_args[2] - }; - builder.ins().call(check_free, &[vmctx, ptr]); + } + let mut args = vec![vmctx]; + args.extend_from_slice(&retvals); + args.extend_from_slice(&func_args[2..2 + expected_arg_count]); + builder.ins().call(builtin_memcheck_check, &args); } fn epoch_ptr(&mut self, builder: &mut FunctionBuilder<'_>) -> ir::Value { @@ -1033,8 +909,8 @@ impl<'module_environment> FuncEnvironment<'module_environment> { } #[cfg(feature = "wmemcheck")] - fn hook_memcheck_off(&mut self, builder: &mut FunctionBuilder) { - let memcheck_off = self.builtin_functions.memcheck_off(builder.func); + fn hook_before_allocator(&mut self, builder: &mut FunctionBuilder) { + let memcheck_off = self.builtin_functions.before_allocator(builder.func); let vmctx = self.vmctx_val(&mut builder.cursor()); builder.ins().call(memcheck_off, &[vmctx]); } @@ -3188,20 +3064,14 @@ impl<'module_environment> crate::translate::FuncEnvironment #[cfg(feature = "wmemcheck")] if self.wmemcheck { match self.current_func_name(builder) { - Some("__wrap_malloc") | Some("malloc") => - self.hook_memcheck_off(builder), - Some("__wrap_calloc") | Some("calloc") => - self.hook_memcheck_off(builder), - Some("__wrap_realloc") | Some("realloc") => - self.hook_memcheck_off(builder), - Some("__wrap_malloc_usable_size") | Some("malloc_usable_size") => - self.hook_memcheck_off(builder), - Some("__wrap_posix_memalign") | Some("posix_memalign") => - self.hook_memcheck_off(builder), - Some("__wrap_aligned_alloc") | Some("aligned_alloc") => - self.hook_memcheck_off(builder), - Some("__wrap_free") | Some("free") => - self.hook_memcheck_off(builder), + Some("malloc") | + Some("free") | + Some("calloc") | + Some("realloc") | + Some("posix_memalign") | + Some("aligned_alloc") | + Some("malloc_usable_size") => + self.hook_before_allocator(builder), _ => () } } @@ -3253,20 +3123,34 @@ impl<'module_environment> crate::translate::FuncEnvironment if self.wmemcheck { let name = self.current_func_name(builder); match name { - Some("__wrap_malloc") | Some("malloc") => - self.hook_malloc_exit(builder, retvals), - Some("__wrap_calloc") | Some("calloc") => - self.hook_calloc_exit(builder, retvals), - Some("__wrap_realloc") | Some("realloc") => - self.hook_realloc_exit(builder, retvals), - Some("__wrap_malloc_usable_size") | Some("malloc_usable_size") => - self.hook_malloc_usable_size_exit(builder, retvals), - Some("__wrap_posix_memalign") | Some("posix_memalign") => - self.hook_posix_memalign_exit(builder), - Some("__wrap_aligned_alloc") | Some("aligned_alloc") => - self.hook_aligned_alloc(builder, retvals), - Some("__wrap_free") | Some("free") => - 
self.hook_free_exit(builder), + Some("malloc") => { + let check_malloc = self.builtin_functions.check_malloc(builder.func); + self.hook_memcheck_exit(builder, check_malloc, 1, retvals) + } + Some("free") => { + let check_free = self.builtin_functions.check_free(builder.func); + self.hook_memcheck_exit(builder, check_free, 1, &[]) + } + Some("calloc") => { + let check_calloc = self.builtin_functions.check_calloc(builder.func); + self.hook_memcheck_exit(builder, check_calloc, 2, retvals) + } + Some("realloc") => { + let check_realloc = self.builtin_functions.check_realloc(builder.func); + self.hook_memcheck_exit(builder, check_realloc, 2, retvals) + } + Some("posix_memalign") => { + let check_posix_memalign = self.builtin_functions.check_posix_memalign(builder.func); + self.hook_memcheck_exit(builder, check_posix_memalign, 3, retvals) + } + Some("aligned_alloc") => { + let check_aligned_alloc = self.builtin_functions.check_aligned_alloc(builder.func); + self.hook_memcheck_exit(builder, check_aligned_alloc, 2, retvals) + } + Some("malloc_usable_size") => { + let check_malloc_usable_size = self.builtin_functions.check_malloc_usable_size(builder.func); + self.hook_memcheck_exit(builder, check_malloc_usable_size, 1, retvals) + } _ => () } } diff --git a/crates/environ/src/builtin.rs b/crates/environ/src/builtin.rs index dd1bd38dc4ae..c8f252959fd6 100644 --- a/crates/environ/src/builtin.rs +++ b/crates/environ/src/builtin.rs @@ -41,24 +41,30 @@ macro_rules! foreach_builtin_function { out_of_gas(vmctx: vmctx); // Invoked when we reach a new epoch. new_epoch(vmctx: vmctx) -> i64; + // Invoked before memory allocation functions are called. + #[cfg(feature = "wmemcheck")] + before_allocator(vmctx: vmctx); // Invoked before malloc returns. #[cfg(feature = "wmemcheck")] check_malloc(vmctx: vmctx, addr: i32, len: i32) -> i32; + // Invoked before the free returns. + #[cfg(feature = "wmemcheck")] + check_free(vmctx: vmctx, addr: i32) -> i32; // Invoked before calloc returns. #[cfg(feature = "wmemcheck")] check_calloc(vmctx: vmctx, addr: i32, count: i32, size: i32) -> i32; // Invoked before realloc returns. #[cfg(feature = "wmemcheck")] check_realloc(vmctx: vmctx, end_addr: i32, start_addr: i32, len: i32) -> i32; - // Invoked before malloc_usable_size returns. - #[cfg(feature = "wmemcheck")] - check_malloc_usable_size(vmctx: vmctx, len: i32, addr: i32) -> i32; // Invoked before posix_memalign returns. #[cfg(feature = "wmemcheck")] - check_posix_memalign(vmctx: vmctx, outptr: i32, size: i32) -> i32; - // Invoked before the free returns. + check_posix_memalign(vmctx: vmctx, outptr: i32, alignment: i32, size: i32) -> i32; + // Invoked before aligned_alloc returns. #[cfg(feature = "wmemcheck")] - check_free(vmctx: vmctx, addr: i32) -> i32; + check_aligned_alloc(vmctx: vmctx, outptr: i32, alignment: i32, size: i32) -> i32; + // Invoked before malloc_usable_size returns. + #[cfg(feature = "wmemcheck")] + check_malloc_usable_size(vmctx: vmctx, len: i32, addr: i32) -> i32; // Invoked before a load is executed. #[cfg(feature = "wmemcheck")] check_load(vmctx: vmctx, num_bytes: i32, addr: i32, offset: i32) -> i32; @@ -72,10 +78,6 @@ macro_rules! foreach_builtin_function { #[cfg(feature = "wmemcheck")] update_mem_size(vmctx: vmctx, num_bytes: i32); - // Invoked before stuff is called. - #[cfg(feature = "wmemcheck")] - memcheck_off(vmctx: vmctx); - // Drop a non-stack GC reference (eg an overwritten table entry) // once it will no longer be used again. 
(Note: `val` is not of type // `reference` because it needn't appear in any stack maps, as it diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index 252c2e6f5d7b..6949b8a52a32 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -1137,6 +1137,27 @@ unsafe fn check_malloc( Ok(0) } +// Hook for validating free using wmemcheck_state. +#[cfg(feature = "wmemcheck")] +unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u32) -> Result { + if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { + let result = wmemcheck_state.free(addr as usize); + wmemcheck_state.memcheck_on(); + match result { + Ok(()) => { + return Ok(0); + } + Err(InvalidFree { addr }) => { + bail!("Invalid free at addr {:#x}", addr) + } + _ => { + panic!("unreachable") + } + } + } + Ok(0) +} + // Hook for validating calloc using wmemcheck_state. #[cfg(feature = "wmemcheck")] unsafe fn check_calloc(store: &mut dyn VMStore, instance: &mut Instance, addr: u32, count: u32, size: u32) -> Result { @@ -1150,17 +1171,10 @@ unsafe fn check_realloc(store: &mut dyn VMStore, instance: &mut Instance, end_ad check_malloc(store, instance, end_addr, len) } -// Hook for validating malloc_usable_size using wmemcheck_state. -#[cfg(feature = "wmemcheck")] -unsafe fn check_malloc_usable_size(store: &mut dyn VMStore, instance: &mut Instance, len: u32, addr: u32) -> Result { - check_free(store, instance, addr)?; - check_malloc(store, instance, addr, len) -} - // Hook for validating posix_memalign using wmemcheck_state. #[cfg(feature = "wmemcheck")] -unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, outptr: u32, size: u32) -> Result { +unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, outptr: u32, _alignment: u32, size: u32) -> Result { for (_, entry) in instance.exports() { if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { let mem = instance.get_memory(*mem_index); @@ -1171,49 +1185,18 @@ unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, todo!("Why is there no memory?") } -// Hook for validating free using wmemcheck_state. +// Hook for validating aligned_alloc using wmemcheck_state. #[cfg(feature = "wmemcheck")] -unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u32) -> Result { - if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { - let result = wmemcheck_state.free(addr as usize); - wmemcheck_state.memcheck_on(); - match result { - Ok(()) => { - return Ok(0); - } - Err(InvalidFree { addr }) => { - bail!("Invalid free at addr {:#x}", addr) - } - _ => { - panic!("unreachable") - } - } - } - Ok(0) +unsafe fn check_aligned_alloc(store: &mut dyn VMStore, instance: &mut Instance, addr: u32, _alignment: u32, size: u32) -> Result { + check_malloc(store, instance, addr, size) } +// Hook for validating malloc_usable_size using wmemcheck_state. 
#[cfg(feature = "wmemcheck")] -fn log_allocation_previous_to(instance: &mut Instance, addr: usize) { - if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { - if let Some((prev_malloc, prev_len)) = wmemcheck_state.malloc_previous_to(addr) { - println!("previous malloc'd range was {:#x}..{:#x}", prev_malloc, prev_malloc + prev_len); - for (_, entry) in instance.exports() { - if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { - let mem = instance.get_memory(*mem_index); - for i in 0..prev_len { - if i > 0 && i % 40 == 0 { - println!(); - } - unsafe { - print!("{:02x} ", *mem.base.offset((prev_malloc + i) as isize)); - } - } - println!(); - break - } - } - } - } +unsafe fn check_malloc_usable_size(store: &mut dyn VMStore, instance: &mut Instance, len: u32, addr: u32) -> Result { + // Since the wasm program has checked that the entire allocation is usable, mark it as allocated, similar to realloc + check_free(store, instance, addr)?; + check_malloc(store, instance, addr, len) } // Hook for validating load using wmemcheck_state. @@ -1232,7 +1215,6 @@ fn check_load( return Ok(0); } Err(InvalidRead { addr, len }) => { - log_allocation_previous_to(instance, addr); bail!("Invalid load at addr {:#x} of size {}", addr, len); } Err(OutOfBounds { addr, len }) => { @@ -1262,7 +1244,6 @@ fn check_store( return Ok(0); } Err(InvalidWrite { addr, len }) => { - log_allocation_previous_to(instance, addr); bail!("Invalid store at addr {:#x} of size {}", addr, len) } Err(OutOfBounds { addr, len }) => { @@ -1277,7 +1258,7 @@ fn check_store( } #[cfg(feature = "wmemcheck")] -fn memcheck_off(_store: &mut dyn VMStore, instance: &mut Instance) { +fn before_allocator(_store: &mut dyn VMStore, instance: &mut Instance) { if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { wmemcheck_state.memcheck_off(); } From 7db657fd35acb9e352f3fdd84f713531c3026cb3 Mon Sep 17 00:00:00 2001 From: Andrew Cobb Date: Wed, 20 Nov 2024 15:34:33 -0700 Subject: [PATCH 3/5] more cleanup and consitency --- crates/cranelift/src/func_environ.rs | 8 +- crates/environ/src/builtin.rs | 4 +- crates/wasmtime/src/runtime/vm/instance.rs | 2 +- crates/wasmtime/src/runtime/vm/libcalls.rs | 110 ++++++++++------- crates/wmemcheck/src/lib.rs | 134 ++++++++++++--------- 5 files changed, 149 insertions(+), 109 deletions(-) diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 413eecf78be8..1df4b0379bea 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -909,10 +909,10 @@ impl<'module_environment> FuncEnvironment<'module_environment> { } #[cfg(feature = "wmemcheck")] - fn hook_before_allocator(&mut self, builder: &mut FunctionBuilder) { - let memcheck_off = self.builtin_functions.before_allocator(builder.func); + fn check_allocator_start(&mut self, builder: &mut FunctionBuilder) { + let allocator_start = self.builtin_functions.allocator_start(builder.func); let vmctx = self.vmctx_val(&mut builder.cursor()); - builder.ins().call(memcheck_off, &[vmctx]); + builder.ins().call(allocator_start, &[vmctx]); } #[cfg(feature = "wmemcheck")] @@ -3071,7 +3071,7 @@ impl<'module_environment> crate::translate::FuncEnvironment Some("posix_memalign") | Some("aligned_alloc") | Some("malloc_usable_size") => - self.hook_before_allocator(builder), + self.check_allocator_start(builder), _ => () } } diff --git a/crates/environ/src/builtin.rs b/crates/environ/src/builtin.rs index c8f252959fd6..f94a129d7281 100644 --- a/crates/environ/src/builtin.rs +++ 
b/crates/environ/src/builtin.rs @@ -43,7 +43,7 @@ macro_rules! foreach_builtin_function { new_epoch(vmctx: vmctx) -> i64; // Invoked before memory allocation functions are called. #[cfg(feature = "wmemcheck")] - before_allocator(vmctx: vmctx); + allocator_start(vmctx: vmctx); // Invoked before malloc returns. #[cfg(feature = "wmemcheck")] check_malloc(vmctx: vmctx, addr: i32, len: i32) -> i32; @@ -58,7 +58,7 @@ macro_rules! foreach_builtin_function { check_realloc(vmctx: vmctx, end_addr: i32, start_addr: i32, len: i32) -> i32; // Invoked before posix_memalign returns. #[cfg(feature = "wmemcheck")] - check_posix_memalign(vmctx: vmctx, outptr: i32, alignment: i32, size: i32) -> i32; + check_posix_memalign(vmctx: vmctx, result: i32, outptr: i32, alignment: i32, size: i32) -> i32; // Invoked before aligned_alloc returns. #[cfg(feature = "wmemcheck")] check_aligned_alloc(vmctx: vmctx, outptr: i32, alignment: i32, size: i32) -> i32; diff --git a/crates/wasmtime/src/runtime/vm/instance.rs b/crates/wasmtime/src/runtime/vm/instance.rs index 7dbf4cf85152..d893e57725b2 100644 --- a/crates/wasmtime/src/runtime/vm/instance.rs +++ b/crates/wasmtime/src/runtime/vm/instance.rs @@ -336,7 +336,7 @@ impl Instance { .unwrap_or(0) * 64 * 1024; - Some(Wmemcheck::new(size as usize)) + Some(Wmemcheck::new(size as usize, 4)) } else { None } diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index 6949b8a52a32..dae8b959e6d8 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -63,8 +63,10 @@ use core::time::Duration; use wasmtime_environ::Unsigned; use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap}; #[cfg(feature = "wmemcheck")] -use wasmtime_wmemcheck::AccessError::{ - DoubleMalloc, InvalidFree, InvalidRead, InvalidWrite, OutOfBounds, +use wasmtime_wmemcheck::{ + AccessError, AccessError::{ + DoubleMalloc, InvalidFree, InvalidRead, InvalidRealloc, InvalidWrite, OutOfBounds, + } }; /// Raw functions which are actually called from compiled code. @@ -1108,6 +1110,30 @@ fn new_epoch(store: &mut dyn VMStore, _instance: &mut Instance) -> Result { store.new_epoch() } +#[cfg(feature = "wmemcheck")] +fn check_memcheck_result(result: Result<(), AccessError>) -> Result { + match result { + Ok(()) => { + Ok(0) + } + Err(DoubleMalloc { addr, len }) => { + bail!("Double malloc at addr {:#x} of size {}", addr, len) + } + Err(OutOfBounds { addr, len }) => { + bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len) + } + Err(InvalidFree { addr }) => { + bail!("Invalid free at addr {:#x}", addr) + } + Err(InvalidRealloc { addr }) => { + bail!("Invalid realloc at addr {:#x}", addr) + } + _ => { + panic!("unreachable") + } + } +} + // Hook for validating malloc using wmemcheck_state. 
#[cfg(feature = "wmemcheck")] unsafe fn check_malloc( @@ -1117,22 +1143,9 @@ unsafe fn check_malloc( len: u32, ) -> Result { if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { - let result = wmemcheck_state.malloc(addr as usize, len as usize); + let result = wmemcheck_state.allocate(addr as usize, len as usize, false); wmemcheck_state.memcheck_on(); - match result { - Ok(()) => { - return Ok(0); - } - Err(DoubleMalloc { addr, len }) => { - bail!("Double malloc at addr {:#x} of size {}", addr, len) - } - Err(OutOfBounds { addr, len }) => { - bail!("Malloc out of bounds at addr {:#x} of size {}", addr, len); - } - _ => { - panic!("unreachable") - } - } + return check_memcheck_result(result); } Ok(0) } @@ -1143,46 +1156,57 @@ unsafe fn check_free(_store: &mut dyn VMStore, instance: &mut Instance, addr: u3 if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { let result = wmemcheck_state.free(addr as usize); wmemcheck_state.memcheck_on(); - match result { - Ok(()) => { - return Ok(0); - } - Err(InvalidFree { addr }) => { - bail!("Invalid free at addr {:#x}", addr) - } - _ => { - panic!("unreachable") - } - } + return check_memcheck_result(result); } Ok(0) } // Hook for validating calloc using wmemcheck_state. #[cfg(feature = "wmemcheck")] -unsafe fn check_calloc(store: &mut dyn VMStore, instance: &mut Instance, addr: u32, count: u32, size: u32) -> Result { - check_malloc(store, instance, addr, count * size) +unsafe fn check_calloc( + _store: &mut dyn VMStore, + instance: &mut Instance, + addr: u32, + count: u32, + size: u32, +) -> Result { + if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { + let result = wmemcheck_state.allocate(addr as usize, (count * size) as usize, true); + wmemcheck_state.memcheck_on(); + return check_memcheck_result(result); + } + Ok(0) } // Hook for validating realloc using wmemcheck_state. #[cfg(feature = "wmemcheck")] -unsafe fn check_realloc(store: &mut dyn VMStore, instance: &mut Instance, end_addr: u32, start_addr: u32, len: u32) -> Result { - check_free(store, instance, start_addr)?; - check_malloc(store, instance, end_addr, len) +unsafe fn check_realloc(_store: &mut dyn VMStore, instance: &mut Instance, end_addr: u32, start_addr: u32, len: u32) -> Result { + if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { + let result = wmemcheck_state.realloc(end_addr as usize, start_addr as usize, len as usize); + wmemcheck_state.memcheck_on(); + return check_memcheck_result(result); + } + Ok(0) } // Hook for validating posix_memalign using wmemcheck_state. 
#[cfg(feature = "wmemcheck")] -unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, outptr: u32, _alignment: u32, size: u32) -> Result { - for (_, entry) in instance.exports() { - if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { - let mem = instance.get_memory(*mem_index); - let out = *(mem.base.offset(outptr as isize) as *mut u32); - return check_malloc(store, instance, out, size) +unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, error: u32, outptr: u32, _alignment: u32, size: u32) -> Result { + if let Some(_) = &mut instance.wmemcheck_state { + if error != 0 { + return Ok(0); } + for (_, entry) in instance.exports() { + if let wasmtime_environ::EntityIndex::Memory(mem_index) = entry { + let mem = instance.get_memory(*mem_index); + let out_ptr = *(mem.base.offset(outptr as isize) as *mut u32); + return check_malloc(store, instance, out_ptr, size); + } + } + todo!("Why is there no memory?") } - todo!("Why is there no memory?") + Ok(0) } // Hook for validating aligned_alloc using wmemcheck_state. @@ -1195,8 +1219,7 @@ unsafe fn check_aligned_alloc(store: &mut dyn VMStore, instance: &mut Instance, #[cfg(feature = "wmemcheck")] unsafe fn check_malloc_usable_size(store: &mut dyn VMStore, instance: &mut Instance, len: u32, addr: u32) -> Result { // Since the wasm program has checked that the entire allocation is usable, mark it as allocated, similar to realloc - check_free(store, instance, addr)?; - check_malloc(store, instance, addr, len) + check_realloc(store, instance, addr, addr, len) } // Hook for validating load using wmemcheck_state. @@ -1257,8 +1280,9 @@ fn check_store( Ok(0) } +// Hook for turning wmemcheck load/store validation off when entering an allocator function. #[cfg(feature = "wmemcheck")] -fn before_allocator(_store: &mut dyn VMStore, instance: &mut Instance) { +fn allocator_start(_store: &mut dyn VMStore, instance: &mut Instance) { if let Some(wmemcheck_state) = &mut instance.wmemcheck_state { wmemcheck_state.memcheck_off(); } diff --git a/crates/wmemcheck/src/lib.rs b/crates/wmemcheck/src/lib.rs index 72ce6aa0adca..13b863718a8e 100644 --- a/crates/wmemcheck/src/lib.rs +++ b/crates/wmemcheck/src/lib.rs @@ -1,5 +1,6 @@ use std::cmp::*; use std::collections::HashMap; +use std::ops::Range; /// Memory checker for wasm guest. pub struct Wmemcheck { @@ -8,23 +9,10 @@ pub struct Wmemcheck { pub stack_pointer: usize, max_stack_size: usize, pub flag: bool, + /// granularity in bytes of tracked allocations + pub granularity: usize, } -impl Wmemcheck { - pub fn malloc_previous_to(&self, addr: usize) -> Option<(usize, usize)> { - let mut best: Option<(usize, usize)> = None; - for (base, len) in self.mallocs.iter() { - if let Some((prev_base, _)) = best { - if prev_base < *base && *base <= addr { - best = Some((*base, *len)); - } - } else { - best = Some((*base, *len)); - } - } - best - } -} /// Error types for memory checker. #[derive(Debug, PartialEq)] pub enum AccessError { @@ -36,6 +24,8 @@ pub enum AccessError { InvalidWrite { addr: usize, len: usize }, /// Free of non-malloc'd pointer. InvalidFree { addr: usize }, + /// Reallocation of non-malloc'd pointer + InvalidRealloc { addr: usize }, /// Access out of bounds of heap or stack. OutOfBounds { addr: usize, len: usize }, } @@ -53,7 +43,8 @@ pub enum MemState { impl Wmemcheck { /// Initializes memory checker instance. 
- pub fn new(mem_size: usize) -> Wmemcheck { + pub fn new(mem_size: usize, granularity: usize) -> Wmemcheck { + // TODO: metadata could be shrunk when granularity is greater than 1 let metadata = vec![MemState::Unallocated; mem_size]; let mallocs = HashMap::new(); Wmemcheck { @@ -62,21 +53,19 @@ impl Wmemcheck { stack_pointer: 0, max_stack_size: 0, flag: true, + granularity, } } /// Updates memory checker memory state metadata when malloc is called. - pub fn malloc(&mut self, addr: usize, start_len: usize) -> Result<(), AccessError> { - // round up to multiple of 4 - let len = (start_len + 3) & !3; - + pub fn allocate(&mut self, addr: usize, len: usize, initialized: bool) -> Result<(), AccessError> { if !self.is_in_bounds_heap(addr, len) { return Err(AccessError::OutOfBounds { addr: addr, len: len, }); } - for i in addr..addr + len { + for i in self.granular_range(addr..addr + len) { match self.metadata[i] { MemState::ValidToWrite => { return Err(AccessError::DoubleMalloc { @@ -93,13 +82,32 @@ impl Wmemcheck { _ => {} } } - for i in addr..addr + len { - self.metadata[i] = MemState::ValidToWrite; + for i in self.granular_range(addr..addr + len) { + self.metadata[i] = if initialized { MemState::ValidToReadWrite } else { MemState::ValidToWrite }; } self.mallocs.insert(addr, len); Ok(()) } + pub fn realloc(&mut self, end_addr: usize, start_addr: usize, len: usize) -> Result<(), AccessError> { + if start_addr == 0 { + // If ptr is NULL, realloc() is identical to a call to malloc() + return self.allocate(end_addr, len, false); + } + if !self.mallocs.contains_key(&start_addr) { + return Err(AccessError::InvalidRealloc { addr: start_addr }); + } + let start_len = self.mallocs[&start_addr]; + // Copy initialization information from old allocation to new one + let copy_len = start_len.min(len); + let mut copied_metadata: Vec = vec![]; + copied_metadata.extend_from_slice(&self.metadata[start_addr..start_addr + copy_len]); + self.free(start_addr)?; + self.allocate(end_addr, len, false)?; + self.metadata[end_addr..end_addr + copy_len].clone_from_slice(&copied_metadata); + Ok(()) + } + /// Updates memory checker memory state metadata when a load occurs. 
pub fn read(&mut self, addr: usize, len: usize) -> Result<(), AccessError> { if !self.flag { @@ -119,12 +127,12 @@ impl Wmemcheck { len: len, }); } - /* MemState::ValidToWrite => { + MemState::ValidToWrite => { return Err(AccessError::InvalidRead { addr: addr, len: len, }); - } */ + } _ => {} } } @@ -142,7 +150,7 @@ impl Wmemcheck { len: len, }); } - for i in addr..addr + len { + for i in self.granular_range(addr..addr + len) { if let MemState::Unallocated = self.metadata[i] { return Err(AccessError::InvalidWrite { addr: addr, @@ -150,7 +158,7 @@ impl Wmemcheck { }); } } - for i in addr..addr + len { + for i in self.granular_range(addr..addr + len) { self.metadata[i] = MemState::ValidToReadWrite; } Ok(()) @@ -165,13 +173,13 @@ impl Wmemcheck { return Err(AccessError::InvalidFree { addr: addr }); } let len = self.mallocs[&addr]; - for i in addr..addr + len { + for i in self.granular_range(addr..addr + len) { if let MemState::Unallocated = self.metadata[i] { return Err(AccessError::InvalidFree { addr: addr }); } } self.mallocs.remove(&addr); - for i in addr..addr + len { + for i in self.granular_range(addr..addr + len) { self.metadata[i] = MemState::Unallocated; } Ok(()) @@ -193,11 +201,11 @@ impl Wmemcheck { len: new_sp - self.stack_pointer, }); } else if new_sp < self.stack_pointer { - for i in new_sp..self.stack_pointer + 1 { + for i in self.granular_range(new_sp..self.stack_pointer + 1) { self.metadata[i] = MemState::ValidToReadWrite; } } else { - for i in self.stack_pointer..new_sp { + for i in self.granular_range(self.stack_pointer..new_sp) { self.metadata[i] = MemState::Unallocated; } } @@ -229,14 +237,22 @@ impl Wmemcheck { let to_append = vec![MemState::Unallocated; num_bytes]; self.metadata.extend(to_append); } + + fn granular_range(&self, byte_range: Range) -> Range { + // Round start of range down to granularity + let start = (byte_range.start / self.granularity) * self.granularity; + // Round end of range up to granularity + let end = ((byte_range.end + self.granularity - 1) / self.granularity) * self.granularity; + start..end + } } #[test] fn basic_wmemcheck() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); wmemcheck_state.set_stack_size(1024); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); assert!(wmemcheck_state.read(0x1000, 4).is_ok()); assert_eq!(wmemcheck_state.mallocs, HashMap::from([(0x1000, 32)])); @@ -246,9 +262,9 @@ fn basic_wmemcheck() { #[test] fn read_before_initializing() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( wmemcheck_state.read(0x1000, 4), Err(AccessError::InvalidRead { @@ -262,9 +278,9 @@ fn read_before_initializing() { #[test] fn use_after_free() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); assert!(wmemcheck_state.free(0x1000).is_ok()); @@ -279,9 +295,9 @@ fn use_after_free() { #[test] fn double_free() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut 
wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); assert!(wmemcheck_state.free(0x1000).is_ok()); assert_eq!( @@ -292,17 +308,17 @@ fn double_free() { #[test] fn out_of_bounds_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); assert_eq!( - wmemcheck_state.malloc(640 * 1024, 1), + wmemcheck_state.allocate(640 * 1024, 1, false), Err(AccessError::OutOfBounds { addr: 640 * 1024, len: 1 }) ); assert_eq!( - wmemcheck_state.malloc(640 * 1024 - 10, 15), + wmemcheck_state.allocate(640 * 1024 - 10, 15, false), Err(AccessError::OutOfBounds { addr: 640 * 1024 - 10, len: 15 @@ -313,9 +329,9 @@ fn out_of_bounds_malloc() { #[test] fn out_of_bounds_read() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(640 * 1024 - 24, 24).is_ok()); + assert!(wmemcheck_state.allocate(640 * 1024 - 24, 24, false).is_ok()); assert_eq!( wmemcheck_state.read(640 * 1024 - 24, 25), Err(AccessError::OutOfBounds { @@ -327,18 +343,18 @@ fn out_of_bounds_read() { #[test] fn double_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( - wmemcheck_state.malloc(0x1000, 32), + wmemcheck_state.allocate(0x1000, 32, false), Err(AccessError::DoubleMalloc { addr: 0x1000, len: 32 }) ); assert_eq!( - wmemcheck_state.malloc(0x1002, 32), + wmemcheck_state.allocate(0x1002, 32, false), Err(AccessError::DoubleMalloc { addr: 0x1002, len: 32 @@ -349,18 +365,18 @@ fn double_malloc() { #[test] fn error_type() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); - assert!(wmemcheck_state.malloc(0x1000, 32).is_ok()); + assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( - wmemcheck_state.malloc(0x1000, 32), + wmemcheck_state.allocate(0x1000, 32, false), Err(AccessError::DoubleMalloc { addr: 0x1000, len: 32 }) ); assert_eq!( - wmemcheck_state.malloc(640 * 1024, 32), + wmemcheck_state.allocate(640 * 1024, 32, false), Err(AccessError::OutOfBounds { addr: 640 * 1024, len: 32 @@ -371,12 +387,12 @@ fn error_type() { #[test] fn update_sp_no_error() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); wmemcheck_state.set_stack_size(1024); assert!(wmemcheck_state.update_stack_pointer(768).is_ok()); assert_eq!(wmemcheck_state.stack_pointer, 768); - assert!(wmemcheck_state.malloc(1024 * 2, 32).is_ok()); + assert!(wmemcheck_state.allocate(1024 * 2, 32, false).is_ok()); assert!(wmemcheck_state.free(1024 * 2).is_ok()); assert!(wmemcheck_state.update_stack_pointer(896).is_ok()); assert_eq!(wmemcheck_state.stack_pointer, 896); @@ -385,18 +401,18 @@ fn update_sp_no_error() { #[test] fn bad_stack_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); wmemcheck_state.set_stack_size(1024); assert!(wmemcheck_state.update_stack_pointer(0).is_ok()); assert_eq!(wmemcheck_state.stack_pointer, 0); assert_eq!( - wmemcheck_state.malloc(512, 32), + wmemcheck_state.allocate(512, 32, false), Err(AccessError::OutOfBounds { addr: 
512, len: 32 }) ); assert_eq!( - wmemcheck_state.malloc(1022, 32), + wmemcheck_state.allocate(1022, 32, false), Err(AccessError::OutOfBounds { addr: 1022, len: 32 @@ -406,7 +422,7 @@ fn bad_stack_malloc() { #[test] fn stack_full_empty() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); wmemcheck_state.set_stack_size(1024); @@ -418,7 +434,7 @@ fn stack_full_empty() { #[test] fn from_test_program() { - let mut wmemcheck_state = Wmemcheck::new(1024 * 1024 * 128); + let mut wmemcheck_state = Wmemcheck::new(1024 * 1024 * 128, 1); wmemcheck_state.set_stack_size(70864); assert!(wmemcheck_state.write(70832, 1).is_ok()); assert!(wmemcheck_state.read(1138, 1).is_ok()); From 4be3c30fbcc9b31593889f1a96a3427648077d94 Mon Sep 17 00:00:00 2001 From: Andrew Cobb Date: Wed, 20 Nov 2024 16:50:15 -0700 Subject: [PATCH 4/5] disable enforcing uninitialized reads --- crates/wasmtime/src/runtime/vm/instance.rs | 2 +- crates/wmemcheck/src/lib.rs | 39 ++++++++++++---------- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/crates/wasmtime/src/runtime/vm/instance.rs b/crates/wasmtime/src/runtime/vm/instance.rs index d893e57725b2..137d84287a08 100644 --- a/crates/wasmtime/src/runtime/vm/instance.rs +++ b/crates/wasmtime/src/runtime/vm/instance.rs @@ -336,7 +336,7 @@ impl Instance { .unwrap_or(0) * 64 * 1024; - Some(Wmemcheck::new(size as usize, 4)) + Some(Wmemcheck::new(size as usize, 4, false)) } else { None } diff --git a/crates/wmemcheck/src/lib.rs b/crates/wmemcheck/src/lib.rs index 13b863718a8e..b20bd62b9e05 100644 --- a/crates/wmemcheck/src/lib.rs +++ b/crates/wmemcheck/src/lib.rs @@ -11,6 +11,7 @@ pub struct Wmemcheck { pub flag: bool, /// granularity in bytes of tracked allocations pub granularity: usize, + pub enforce_uninitialized_reads: bool, } /// Error types for memory checker. @@ -43,7 +44,8 @@ pub enum MemState { impl Wmemcheck { /// Initializes memory checker instance. - pub fn new(mem_size: usize, granularity: usize) -> Wmemcheck { + // TODO: how to make this properly configurable? 
+ pub fn new(mem_size: usize, granularity: usize, enforce_uninitialized_reads: bool) -> Wmemcheck { // TODO: metadata could be shrunk when granularity is greater than 1 let metadata = vec![MemState::Unallocated; mem_size]; let mallocs = HashMap::new(); @@ -54,6 +56,7 @@ impl Wmemcheck { max_stack_size: 0, flag: true, granularity, + enforce_uninitialized_reads, } } @@ -128,10 +131,12 @@ impl Wmemcheck { }); } MemState::ValidToWrite => { - return Err(AccessError::InvalidRead { - addr: addr, - len: len, - }); + if self.enforce_uninitialized_reads { + return Err(AccessError::InvalidRead { + addr: addr, + len: len, + }); + } } _ => {} } @@ -249,7 +254,7 @@ impl Wmemcheck { #[test] fn basic_wmemcheck() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); wmemcheck_state.set_stack_size(1024); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); @@ -262,7 +267,7 @@ fn basic_wmemcheck() { #[test] fn read_before_initializing() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( @@ -278,7 +283,7 @@ fn read_before_initializing() { #[test] fn use_after_free() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); @@ -295,7 +300,7 @@ fn use_after_free() { #[test] fn double_free() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert!(wmemcheck_state.write(0x1000, 4).is_ok()); @@ -308,7 +313,7 @@ fn double_free() { #[test] fn out_of_bounds_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert_eq!( wmemcheck_state.allocate(640 * 1024, 1, false), @@ -329,7 +334,7 @@ fn out_of_bounds_malloc() { #[test] fn out_of_bounds_read() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(640 * 1024 - 24, 24, false).is_ok()); assert_eq!( @@ -343,7 +348,7 @@ fn out_of_bounds_read() { #[test] fn double_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( @@ -365,7 +370,7 @@ fn double_malloc() { #[test] fn error_type() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); assert!(wmemcheck_state.allocate(0x1000, 32, false).is_ok()); assert_eq!( @@ -387,7 +392,7 @@ fn error_type() { #[test] fn update_sp_no_error() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); wmemcheck_state.set_stack_size(1024); assert!(wmemcheck_state.update_stack_pointer(768).is_ok()); @@ -401,7 +406,7 @@ fn update_sp_no_error() { #[test] fn bad_stack_malloc() { - let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); wmemcheck_state.set_stack_size(1024); @@ -422,7 +427,7 @@ fn bad_stack_malloc() { #[test] fn stack_full_empty() { - let mut 
wmemcheck_state = Wmemcheck::new(640 * 1024, 1); + let mut wmemcheck_state = Wmemcheck::new(640 * 1024, 1, true); wmemcheck_state.set_stack_size(1024); @@ -434,7 +439,7 @@ fn stack_full_empty() { #[test] fn from_test_program() { - let mut wmemcheck_state = Wmemcheck::new(1024 * 1024 * 128, 1); + let mut wmemcheck_state = Wmemcheck::new(1024 * 1024 * 128, 1, true); wmemcheck_state.set_stack_size(70864); assert!(wmemcheck_state.write(70832, 1).is_ok()); assert!(wmemcheck_state.read(1138, 1).is_ok()); From d2efd40b161808751d9bcbad0df9e426c83f860b Mon Sep 17 00:00:00 2001 From: Andrew Cobb Date: Wed, 20 Nov 2024 20:10:11 -0700 Subject: [PATCH 5/5] formatting --- crates/cranelift/src/func_environ.rs | 29 ++++++++------- crates/wasmtime/src/runtime/vm/libcalls.rs | 42 ++++++++++++++++------ crates/wmemcheck/src/lib.rs | 26 +++++++++++--- 3 files changed, 70 insertions(+), 27 deletions(-) diff --git a/crates/cranelift/src/func_environ.rs b/crates/cranelift/src/func_environ.rs index 1df4b0379bea..896a55211ca1 100644 --- a/crates/cranelift/src/func_environ.rs +++ b/crates/cranelift/src/func_environ.rs @@ -3064,15 +3064,14 @@ impl<'module_environment> crate::translate::FuncEnvironment #[cfg(feature = "wmemcheck")] if self.wmemcheck { match self.current_func_name(builder) { - Some("malloc") | - Some("free") | - Some("calloc") | - Some("realloc") | - Some("posix_memalign") | - Some("aligned_alloc") | - Some("malloc_usable_size") => - self.check_allocator_start(builder), - _ => () + Some("malloc") + | Some("free") + | Some("calloc") + | Some("realloc") + | Some("posix_memalign") + | Some("aligned_alloc") + | Some("malloc_usable_size") => self.check_allocator_start(builder), + _ => (), } } @@ -3140,18 +3139,22 @@ impl<'module_environment> crate::translate::FuncEnvironment self.hook_memcheck_exit(builder, check_realloc, 2, retvals) } Some("posix_memalign") => { - let check_posix_memalign = self.builtin_functions.check_posix_memalign(builder.func); + let check_posix_memalign = + self.builtin_functions.check_posix_memalign(builder.func); self.hook_memcheck_exit(builder, check_posix_memalign, 3, retvals) } Some("aligned_alloc") => { - let check_aligned_alloc = self.builtin_functions.check_aligned_alloc(builder.func); + let check_aligned_alloc = + self.builtin_functions.check_aligned_alloc(builder.func); self.hook_memcheck_exit(builder, check_aligned_alloc, 2, retvals) } Some("malloc_usable_size") => { - let check_malloc_usable_size = self.builtin_functions.check_malloc_usable_size(builder.func); + let check_malloc_usable_size = self + .builtin_functions + .check_malloc_usable_size(builder.func); self.hook_memcheck_exit(builder, check_malloc_usable_size, 1, retvals) } - _ => () + _ => (), } } } diff --git a/crates/wasmtime/src/runtime/vm/libcalls.rs b/crates/wasmtime/src/runtime/vm/libcalls.rs index dae8b959e6d8..728193a3a332 100644 --- a/crates/wasmtime/src/runtime/vm/libcalls.rs +++ b/crates/wasmtime/src/runtime/vm/libcalls.rs @@ -64,9 +64,10 @@ use wasmtime_environ::Unsigned; use wasmtime_environ::{DataIndex, ElemIndex, FuncIndex, MemoryIndex, TableIndex, Trap}; #[cfg(feature = "wmemcheck")] use wasmtime_wmemcheck::{ - AccessError, AccessError::{ + AccessError, + AccessError::{ DoubleMalloc, InvalidFree, InvalidRead, InvalidRealloc, InvalidWrite, OutOfBounds, - } + }, }; /// Raw functions which are actually called from compiled code. 
@@ -1113,9 +1114,7 @@ fn new_epoch(store: &mut dyn VMStore, _instance: &mut Instance) -> Result {
 #[cfg(feature = "wmemcheck")]
 fn check_memcheck_result(result: Result<(), AccessError>) -> Result {
     match result {
-        Ok(()) => {
-            Ok(0)
-        }
+        Ok(()) => Ok(0),
         Err(DoubleMalloc { addr, len }) => {
             bail!("Double malloc at addr {:#x} of size {}", addr, len)
         }
@@ -1180,7 +1179,13 @@ unsafe fn check_calloc(
 
 // Hook for validating realloc using wmemcheck_state.
 #[cfg(feature = "wmemcheck")]
-unsafe fn check_realloc(_store: &mut dyn VMStore, instance: &mut Instance, end_addr: u32, start_addr: u32, len: u32) -> Result {
+unsafe fn check_realloc(
+    _store: &mut dyn VMStore,
+    instance: &mut Instance,
+    end_addr: u32,
+    start_addr: u32,
+    len: u32,
+) -> Result {
     if let Some(wmemcheck_state) = &mut instance.wmemcheck_state {
         let result = wmemcheck_state.realloc(end_addr as usize, start_addr as usize, len as usize);
         wmemcheck_state.memcheck_on();
@@ -1189,10 +1194,16 @@ unsafe fn check_realloc(_store: &mut dyn VMStore, instance: &mut Instance, end_a
     Ok(0)
 }
 
-
 // Hook for validating posix_memalign using wmemcheck_state.
 #[cfg(feature = "wmemcheck")]
-unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance, error: u32, outptr: u32, _alignment: u32, size: u32) -> Result {
+unsafe fn check_posix_memalign(
+    store: &mut dyn VMStore,
+    instance: &mut Instance,
+    error: u32,
+    outptr: u32,
+    _alignment: u32,
+    size: u32,
+) -> Result {
     if let Some(_) = &mut instance.wmemcheck_state {
         if error != 0 {
             return Ok(0);
@@ -1211,13 +1222,24 @@ unsafe fn check_posix_memalign(store: &mut dyn VMStore, instance: &mut Instance,
 
 // Hook for validating aligned_alloc using wmemcheck_state.
 #[cfg(feature = "wmemcheck")]
-unsafe fn check_aligned_alloc(store: &mut dyn VMStore, instance: &mut Instance, addr: u32, _alignment: u32, size: u32) -> Result {
+unsafe fn check_aligned_alloc(
+    store: &mut dyn VMStore,
+    instance: &mut Instance,
+    addr: u32,
+    _alignment: u32,
+    size: u32,
+) -> Result {
     check_malloc(store, instance, addr, size)
 }
 
 // Hook for validating malloc_usable_size using wmemcheck_state.
 #[cfg(feature = "wmemcheck")]
-unsafe fn check_malloc_usable_size(store: &mut dyn VMStore, instance: &mut Instance, len: u32, addr: u32) -> Result {
+unsafe fn check_malloc_usable_size(
+    store: &mut dyn VMStore,
+    instance: &mut Instance,
+    len: u32,
+    addr: u32,
+) -> Result {
     // Since the wasm program has checked that the entire allocation is usable, mark it as allocated, similar to realloc
     check_realloc(store, instance, addr, addr, len)
 }
diff --git a/crates/wmemcheck/src/lib.rs b/crates/wmemcheck/src/lib.rs
index b20bd62b9e05..d991920d94db 100644
--- a/crates/wmemcheck/src/lib.rs
+++ b/crates/wmemcheck/src/lib.rs
@@ -45,7 +45,11 @@ pub enum MemState {
 impl Wmemcheck {
     /// Initializes memory checker instance.
     // TODO: how to make this properly configurable?
-    pub fn new(mem_size: usize, granularity: usize, enforce_uninitialized_reads: bool) -> Wmemcheck {
+    pub fn new(
+        mem_size: usize,
+        granularity: usize,
+        enforce_uninitialized_reads: bool,
+    ) -> Wmemcheck {
         // TODO: metadata could be shrunk when granularity is greater than 1
         let metadata = vec![MemState::Unallocated; mem_size];
         let mallocs = HashMap::new();
@@ -61,7 +65,12 @@ impl Wmemcheck {
     }
 
     /// Updates memory checker memory state metadata when malloc is called.
-    pub fn allocate(&mut self, addr: usize, len: usize, initialized: bool) -> Result<(), AccessError> {
+    pub fn allocate(
+        &mut self,
+        addr: usize,
+        len: usize,
+        initialized: bool,
+    ) -> Result<(), AccessError> {
         if !self.is_in_bounds_heap(addr, len) {
             return Err(AccessError::OutOfBounds {
                 addr: addr,
@@ -86,13 +95,22 @@ impl Wmemcheck {
             }
         }
         for i in self.granular_range(addr..addr + len) {
-            self.metadata[i] = if initialized { MemState::ValidToReadWrite } else { MemState::ValidToWrite };
+            self.metadata[i] = if initialized {
+                MemState::ValidToReadWrite
+            } else {
+                MemState::ValidToWrite
+            };
         }
         self.mallocs.insert(addr, len);
         Ok(())
     }
 
-    pub fn realloc(&mut self, end_addr: usize, start_addr: usize, len: usize) -> Result<(), AccessError> {
+    pub fn realloc(
+        &mut self,
+        end_addr: usize,
+        start_addr: usize,
+        len: usize,
+    ) -> Result<(), AccessError> {
         if start_addr == 0 {
             // If ptr is NULL, realloc() is identical to a call to malloc()
             return self.allocate(end_addr, len, false);
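
For reference, a minimal usage sketch of the reworked Wmemcheck API, mirroring the crate's own tests above (the third argument to Wmemcheck::new is the enforce_uninitialized_reads flag added by these patches):

    use wasmtime_wmemcheck::Wmemcheck;

    fn main() {
        // 640 KiB of checked memory, byte granularity, and uninitialized-read
        // enforcement enabled (the new third constructor argument).
        let mut state = Wmemcheck::new(640 * 1024, 1, true);
        state.set_stack_size(1024);

        // An allocation created with `initialized = false` is writable but not
        // yet readable, so the first read is rejected.
        assert!(state.allocate(0x1000, 32, false).is_ok());
        assert!(state.read(0x1000, 4).is_err());
        assert!(state.write(0x1000, 4).is_ok());
        assert!(state.read(0x1000, 4).is_ok());

        // An allocation marked as initialized is readable right away.
        assert!(state.allocate(0x2000, 32, true).is_ok());
        assert!(state.read(0x2000, 8).is_ok());
    }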