From fdeb78bc04f7c29b32771011bbfc05f19ddd5c24 Mon Sep 17 00:00:00 2001 From: Zijie Zhao Date: Fri, 30 Jan 2026 18:08:05 -0600 Subject: [PATCH] Fix leaks of already-initialized fields when struct/array init fails When `StructRef::new` or `ArrayRef::new_fixed` fails midway, `dealloc_uninit` frees the object without dec-refing GC refs already written to earlier fields, leaking them. Fix by zero-filling the object body in `alloc_raw` so trace_gc_ref skips uninitialized slots, then tracing and dec-refing children in `dealloc_uninit` before freeing. Fixes #12456 --- crates/wasmtime/src/runtime/vm/gc.rs | 9 +- .../wasmtime/src/runtime/vm/gc/enabled/drc.rs | 35 ++++- .../src/runtime/vm/gc/enabled/null.rs | 14 +- .../wasmtime/src/runtime/vm/gc/gc_runtime.rs | 12 +- tests/all/gc.rs | 128 ++++++++++++++++++ 5 files changed, 188 insertions(+), 10 deletions(-) diff --git a/crates/wasmtime/src/runtime/vm/gc.rs b/crates/wasmtime/src/runtime/vm/gc.rs index 6067b565d577..796949a3fd49 100644 --- a/crates/wasmtime/src/runtime/vm/gc.rs +++ b/crates/wasmtime/src/runtime/vm/gc.rs @@ -252,7 +252,8 @@ impl GcStore { /// Deallocate an uninitialized struct. pub fn dealloc_uninit_struct(&mut self, structref: VMStructRef) { - self.gc_heap.dealloc_uninit_struct_or_exn(structref.into()) + self.gc_heap + .dealloc_uninit_struct_or_exn(&mut self.host_data_table, structref.into()) } /// Get the data for the given object reference. @@ -291,7 +292,8 @@ impl GcStore { /// Deallocate an uninitialized array. pub fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) { - self.gc_heap.dealloc_uninit_array(arrayref); + self.gc_heap + .dealloc_uninit_array(&mut self.host_data_table, arrayref); } /// Get the length of the given array. @@ -318,6 +320,7 @@ impl GcStore { /// Deallocate an uninitialized exception object. 
pub fn dealloc_uninit_exn(&mut self, exnref: VMExnRef) { - self.gc_heap.dealloc_uninit_struct_or_exn(exnref.into()); + self.gc_heap + .dealloc_uninit_struct_or_exn(&mut self.host_data_table, exnref.into()); } } diff --git a/crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs b/crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs index 94bc99abb9f1..2a8df6cb1033 100644 --- a/crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs +++ b/crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs @@ -834,6 +834,16 @@ unsafe impl GcHeap for DrcHeap { next_over_approximated_stack_root: None, object_size, }; + + // Zero-fill past the header so `dealloc_uninit_*` can safely + // trace partially-initialized objects. + let header_size = core::mem::size_of::<VMDrcHeader>(); + if layout.size() > header_size { + let start = gc_ref.as_heap_index().unwrap().get(); + let start = usize::try_from(start).unwrap(); + self.heap_slice_mut()[start + header_size..start + layout.size()].fill(0); + } + log::trace!("new object: increment {gc_ref:#p} ref count -> 1"); Ok(Ok(gc_ref)) } @@ -857,7 +867,16 @@ unsafe impl GcHeap for DrcHeap { Ok(Ok(gc_ref)) } - fn dealloc_uninit_struct_or_exn(&mut self, gcref: VMGcRef) { + fn dealloc_uninit_struct_or_exn( + &mut self, + host_data_table: &mut ExternRefHostDataTable, + gcref: VMGcRef, + ) { + let mut children = Vec::new(); + self.trace_gc_ref(&gcref, &mut children); + for child in &children { + self.dec_ref_and_maybe_dealloc(host_data_table, child); + } self.dealloc(gcref); } @@ -881,8 +900,18 @@ unsafe impl GcHeap for DrcHeap { Ok(Ok(gc_ref.into_arrayref_unchecked())) } - fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef) { - self.dealloc(arrayref.into()) + fn dealloc_uninit_array( + &mut self, + host_data_table: &mut ExternRefHostDataTable, + arrayref: VMArrayRef, + ) { + let gc_ref: VMGcRef = arrayref.into(); + let mut children = Vec::new(); + self.trace_gc_ref(&gc_ref, &mut children); + for child in &children { + self.dec_ref_and_maybe_dealloc(host_data_table, child); + } + 
self.dealloc(gc_ref); } fn array_len(&self, arrayref: &VMArrayRef) -> u32 { diff --git a/crates/wasmtime/src/runtime/vm/gc/enabled/null.rs b/crates/wasmtime/src/runtime/vm/gc/enabled/null.rs index b71d56979fdd..ac677c47e9f0 100644 --- a/crates/wasmtime/src/runtime/vm/gc/enabled/null.rs +++ b/crates/wasmtime/src/runtime/vm/gc/enabled/null.rs @@ -299,7 +299,12 @@ unsafe impl GcHeap for NullHeap { self.alloc(VMGcHeader::from_kind_and_index(kind, ty), layout.layout()) } - fn dealloc_uninit_struct_or_exn(&mut self, _struct_ref: VMGcRef) {} + fn dealloc_uninit_struct_or_exn( + &mut self, + _host_data_table: &mut ExternRefHostDataTable, + _struct_ref: VMGcRef, + ) { + } fn alloc_uninit_array( &mut self, @@ -320,7 +325,12 @@ unsafe impl GcHeap for NullHeap { }) } - fn dealloc_uninit_array(&mut self, _array_ref: VMArrayRef) {} + fn dealloc_uninit_array( + &mut self, + _host_data_table: &mut ExternRefHostDataTable, + _array_ref: VMArrayRef, + ) { + } fn array_len(&self, arrayref: &VMArrayRef) -> u32 { let arrayref = VMNullArrayHeader::typed_ref(self, arrayref); diff --git a/crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs b/crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs index 2808b46fc89c..c60f388b469f 100644 --- a/crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs +++ b/crates/wasmtime/src/runtime/vm/gc/gc_runtime.rs @@ -302,7 +302,11 @@ pub unsafe trait GcHeap: 'static + Send + Sync { /// that the struct's allocation can be eagerly reclaimed, and so that the /// collector doesn't attempt to treat any of the uninitialized fields as /// valid GC references, or something like that. - fn dealloc_uninit_struct_or_exn(&mut self, structref: VMGcRef); + fn dealloc_uninit_struct_or_exn( + &mut self, + host_data_table: &mut ExternRefHostDataTable, + structref: VMGcRef, + ); /// * `Ok(Ok(_))`: The allocation was successful. 
/// @@ -328,7 +332,11 @@ pub unsafe trait GcHeap: 'static + Send + Sync { /// that the array's allocation can be eagerly reclaimed, and so that the /// collector doesn't attempt to treat any of the uninitialized fields as /// valid GC references, or something like that. - fn dealloc_uninit_array(&mut self, arrayref: VMArrayRef); + fn dealloc_uninit_array( + &mut self, + host_data_table: &mut ExternRefHostDataTable, + arrayref: VMArrayRef, + ); /// Get the length of the given array. /// diff --git a/tests/all/gc.rs b/tests/all/gc.rs index 55b6e9e8b5b6..f7bdd47421c4 100644 --- a/tests/all/gc.rs +++ b/tests/all/gc.rs @@ -1800,3 +1800,131 @@ fn array_init_elem_oom() -> Result<()> { Ok(()) } + +#[test] +fn struct_new_init_failure_no_leak() -> Result<()> { + let mut store = crate::gc_store()?; + let ty = StructType::new( + store.engine(), + [ + FieldType::new(Mutability::Var, StorageType::ValType(ValType::EXTERNREF)), + FieldType::new(Mutability::Var, StorageType::ValType(ValType::EXTERNREF)), + ], + )?; + let pre = StructRefPre::new(&mut store, ty); + let dropped = Arc::new(AtomicBool::new(false)); + { + let mut scope = RootScope::new(&mut store); + let good = ExternRef::new(&mut scope, SetFlagOnDrop(dropped.clone()))?; + // Create an unrooted ref by letting its scope expire. + let bad = { + let mut tmp = RootScope::new(&mut scope); + ExternRef::new(&mut tmp, 0u32)? 
+ }; + assert!( + StructRef::new( + &mut scope, + &pre, + &[Val::ExternRef(Some(good)), Val::ExternRef(Some(bad))], + ) + .is_err() + ); + } + let _ = store.gc(None); + assert!(dropped.load(SeqCst), "field 0 externref was leaked"); + Ok(()) +} + +#[test] +fn array_new_fixed_init_failure_no_leak() -> Result<()> { + let mut store = crate::gc_store()?; + let ty = ArrayType::new( + store.engine(), + FieldType::new(Mutability::Var, StorageType::ValType(ValType::EXTERNREF)), + ); + let pre = ArrayRefPre::new(&mut store, ty); + let dropped = Arc::new(AtomicBool::new(false)); + { + let mut scope = RootScope::new(&mut store); + let good = ExternRef::new(&mut scope, SetFlagOnDrop(dropped.clone()))?; + // Create an unrooted ref by letting its scope expire. + let bad = { + let mut tmp = RootScope::new(&mut scope); + ExternRef::new(&mut tmp, 0u32)? + }; + assert!( + ArrayRef::new_fixed( + &mut scope, + &pre, + &[Val::ExternRef(Some(good)), Val::ExternRef(Some(bad))], + ) + .is_err() + ); + } + let _ = store.gc(None); + assert!(dropped.load(SeqCst), "element 0 externref was leaked"); + Ok(()) +} + +#[test] +fn zero_fill_prevents_stale_gc_ref_in_dealloc_uninit() -> Result<()> { + // Verify that alloc_raw zero-fills the object body. Without zero-fill, + // stale GC-ref bytes from a prior allocation would cause dealloc_uninit + // to spuriously dec-ref a still-live object. + let mut store = crate::gc_store()?; + let ty = StructType::new( + store.engine(), + [ + FieldType::new(Mutability::Var, StorageType::ValType(ValType::EXTERNREF)), + FieldType::new(Mutability::Var, StorageType::ValType(ValType::EXTERNREF)), + ], + )?; + let pre = StructRefPre::new(&mut store, ty); + let dropped = Arc::new(AtomicBool::new(false)); + + // Create E, put it in a struct, keep E alive via OwnedRooted. 
+ let e_owned = { + let mut scope = RootScope::new(&mut store); + let e = ExternRef::new(&mut scope, SetFlagOnDrop(dropped.clone()))?; + let e_owned = e.to_owned_rooted(&mut scope)?; + let filler = ExternRef::new(&mut scope, 0u32)?; + let _s = StructRef::new( + &mut scope, + &pre, + &[Val::ExternRef(Some(e)), Val::ExternRef(Some(filler))], + )?; + e_owned + }; + let _ = store.gc(None); + + assert!(!dropped.load(SeqCst), "E should still be alive after GC"); + + // Reuse same heap region, fail on field 0 so nothing is initialized. + { + let mut scope = RootScope::new(&mut store); + let bad = { + let mut tmp = RootScope::new(&mut scope); + ExternRef::new(&mut tmp, 0u32)? + }; + assert!( + StructRef::new( + &mut scope, + &pre, + &[Val::ExternRef(Some(bad)), Val::ExternRef(None)], + ) + .is_err() + ); + } + + // Without zero-fill the spurious dec-ref would have freed E. + assert!(!dropped.load(SeqCst), "alloc_raw did not zero-fill"); + + // Release E and verify it gets collected. + drop(e_owned); + let _ = store.gc(None); + assert!( + dropped.load(SeqCst), + "E should be freed after dropping owned root" + ); + Ok(()) +}