Skip to content

Commit 0096013

Browse files
authored
Use bump allocation in DRC free list and other improvements (#12969)
* Use bump allocation in DRC free list and other improvements. Also add fast-path entry points that take a `u32` size directly that has already been rounded to the free list's alignment. Altogether, this shaves off ~309B instructions retired (48%) from the benchmark in #11141. * Address review feedback
1 parent d248736 commit 0096013

File tree

3 files changed

+248
-123
lines changed

3 files changed

+248
-123
lines changed
Lines changed: 7 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -0,0 +1,7 @@
1+
# Seeds for failure cases proptest has generated in the past. It is
2+
# automatically read and these particular cases re-run before any
3+
# novel cases are generated.
4+
#
5+
# It is recommended to check this file in to source control so that
6+
# everyone who runs the test benefits from these saved cases.
7+
cc b26e69fbaf46deb79652859039538e422818fd40b9afff63faa7aacbddecfd3d # shrinks to (capacity, ops) = (219544665809630458, [(10, Alloc(Layout { size: 193045289231815352, align: 8 (1 << 3) })), (10, Dealloc(Layout { size: 193045289231815352, align: 8 (1 << 3) }))])

crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs

Lines changed: 11 additions & 5 deletions
Original file line number | Diff line number | Diff line change
@@ -176,17 +176,21 @@ impl DrcHeap {
176176

177177
fn dealloc(&mut self, gc_ref: VMGcRef) {
178178
let drc_ref = drc_ref(&gc_ref);
179-
let size = self.index(drc_ref).object_size();
180-
let layout = FreeList::layout(size);
179+
let size = self.index(drc_ref).object_size;
180+
let alloc_size = FreeList::aligned_size(size);
181181
let index = gc_ref.as_heap_index().unwrap();
182182

183183
// Poison the freed memory so that any stale access is detectable.
184184
if cfg!(gc_zeal) {
185185
let index = usize::try_from(index.get()).unwrap();
186-
self.heap_slice_mut()[index..][..layout.size()].fill(POISON);
186+
let alloc_size = usize::try_from(alloc_size).unwrap();
187+
self.heap_slice_mut()[index..][..alloc_size].fill(POISON);
187188
}
188189

189-
self.free_list.as_mut().unwrap().dealloc(index, layout);
190+
self.free_list
191+
.as_mut()
192+
.unwrap()
193+
.dealloc_fast(index, alloc_size);
190194
}
191195

192196
/// Increment the ref count for the associated object.
@@ -920,6 +924,7 @@ unsafe impl GcHeap for DrcHeap {
920924
fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
921925
debug_assert!(layout.size() >= core::mem::size_of::<VMDrcHeader>());
922926
debug_assert!(layout.align() >= core::mem::align_of::<VMDrcHeader>());
927+
debug_assert!(FreeList::can_align_to(layout.align()));
923928
debug_assert_eq!(header.reserved_u26(), 0);
924929

925930
// We must have trace info for every GC type that we allocate in this
@@ -933,8 +938,9 @@ unsafe impl GcHeap for DrcHeap {
933938
}
934939

935940
let object_size = u32::try_from(layout.size()).unwrap();
941+
let alloc_size = FreeList::aligned_size(object_size);
936942

937-
let gc_ref = match self.free_list.as_mut().unwrap().alloc(layout)? {
943+
let gc_ref = match self.free_list.as_mut().unwrap().alloc_fast(alloc_size) {
938944
None => return Ok(Err(u64::try_from(layout.size()).unwrap())),
939945
Some(index) => VMGcRef::from_heap_index(index).unwrap(),
940946
};

0 commit comments

Comments (0)