Skip to content

Commit 14c182c

Browse files
committed
Use bump allocation in DRC free list and other improvements
* Avoid using a `BTreeMap` and use a cache-friendly `Vec` instead. * When merging blocks in the free list, use linear search, only falling back to binary search when the free list is large. * Add fast-path entry points that directly take a `u32` size which has already been rounded up to the free list's alignment. Altogether, this shaves off ~309B instructions retired (48%) from the benchmark in #11141
1 parent eb4c527 commit 14c182c

3 files changed

Lines changed: 287 additions & 140 deletions

File tree

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
# Seeds for failure cases proptest has generated in the past. It is
2+
# automatically read and these particular cases re-run before any
3+
# novel cases are generated.
4+
#
5+
# It is recommended to check this file in to source control so that
6+
# everyone who runs the test benefits from these saved cases.
7+
cc b26e69fbaf46deb79652859039538e422818fd40b9afff63faa7aacbddecfd3d # shrinks to (capacity, ops) = (219544665809630458, [(10, Alloc(Layout { size: 193045289231815352, align: 8 (1 << 3) })), (10, Dealloc(Layout { size: 193045289231815352, align: 8 (1 << 3) }))])

crates/wasmtime/src/runtime/vm/gc/enabled/drc.rs

Lines changed: 11 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -176,17 +176,21 @@ impl DrcHeap {
176176

177177
fn dealloc(&mut self, gc_ref: VMGcRef) {
178178
let drc_ref = drc_ref(&gc_ref);
179-
let size = self.index(drc_ref).object_size();
180-
let layout = FreeList::layout(size);
179+
let size = self.index(drc_ref).object_size;
180+
let alloc_size = FreeList::aligned_size(size);
181181
let index = gc_ref.as_heap_index().unwrap();
182182

183183
// Poison the freed memory so that any stale access is detectable.
184184
if cfg!(gc_zeal) {
185185
let index = usize::try_from(index.get()).unwrap();
186-
self.heap_slice_mut()[index..][..layout.size()].fill(POISON);
186+
let alloc_size = usize::try_from(alloc_size).unwrap();
187+
self.heap_slice_mut()[index..][..alloc_size].fill(POISON);
187188
}
188189

189-
self.free_list.as_mut().unwrap().dealloc(index, layout);
190+
self.free_list
191+
.as_mut()
192+
.unwrap()
193+
.dealloc_fast(index, alloc_size);
190194
}
191195

192196
/// Increment the ref count for the associated object.
@@ -921,6 +925,7 @@ unsafe impl GcHeap for DrcHeap {
921925
fn alloc_raw(&mut self, header: VMGcHeader, layout: Layout) -> Result<Result<VMGcRef, u64>> {
922926
debug_assert!(layout.size() >= core::mem::size_of::<VMDrcHeader>());
923927
debug_assert!(layout.align() >= core::mem::align_of::<VMDrcHeader>());
928+
debug_assert!(FreeList::can_align_to(layout.align()));
924929
debug_assert_eq!(header.reserved_u26(), 0);
925930

926931
// We must have trace info for every GC type that we allocate in this
@@ -934,8 +939,9 @@ unsafe impl GcHeap for DrcHeap {
934939
}
935940

936941
let object_size = u32::try_from(layout.size()).unwrap();
942+
let alloc_size = FreeList::aligned_size(object_size);
937943

938-
let gc_ref = match self.free_list.as_mut().unwrap().alloc(layout)? {
944+
let gc_ref = match self.free_list.as_mut().unwrap().alloc_fast(alloc_size) {
939945
None => return Ok(Err(u64::try_from(layout.size()).unwrap())),
940946
Some(index) => VMGcRef::from_heap_index(index).unwrap(),
941947
};

0 commit comments

Comments
 (0)