Skip to content

Commit

Permalink
Incremental GC: Optimize occupied size computation (#4021)
Browse files Browse the repository at this point in the history
This is a minor optimization for the incremental GC:
Currently, the occupied heap size is computed by iterating through the partition table, which happens at each GC scheduling point. This optimization caches the heap size by precomputing the occupied size of all partitions except for the dynamic space of the current allocation partition (where the bump allocator is active).

Instructions executed on GC scheduling:
Unoptimized | Optimized | Saving
-------------| --------------- |-----
5899 | 322 | 95%

In the GC benchmark, the performance gain is, however, negligible. It could be more relevant when moving to 64-bit.
  • Loading branch information
luc-blaeser authored Jun 26, 2023
1 parent 33b481c commit 8bc3853
Showing 1 changed file with 31 additions and 8 deletions.
39 changes: 31 additions & 8 deletions rts/motoko-rts/src/gc/incremental/partitioned_heap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -125,6 +125,10 @@ impl Partition {
self.dynamic_size
}

/// Total space accounted to this partition: static space plus dynamic space, in bytes.
// NOTE(review): the name is a typo for `occupied_size`; kept as-is because renaming
// would break callers — consider a rename in a follow-up change.
pub fn occuped_size(&self) -> usize {
self.dynamic_size + self.static_size
}

/// Getter for the cached `marked_size` field.
// NOTE(review): presumably the number of bytes marked live in this partition by the
// GC mark phase — confirm against the code that updates `marked_size`.
pub fn marked_size(&self) -> usize {
self.marked_size
}
Expand Down Expand Up @@ -349,6 +353,7 @@ pub struct PartitionedHeap {
reclaimed: u64,
bitmap_allocation_pointer: usize, // Free pointer for allocating the next mark bitmap.
gc_running: bool, // Create bitmaps for partitions when allocated during active GC.
precomputed_heap_size: usize, // Occupied heap size, excluding the dynamic heap in the allocation partition.
}

/// Optimization: Avoiding `Option` or `LazyCell`.
Expand All @@ -361,6 +366,7 @@ pub const UNINITIALIZED_HEAP: PartitionedHeap = PartitionedHeap {
reclaimed: 0,
bitmap_allocation_pointer: 0,
gc_running: false,
precomputed_heap_size: 0,
};

impl PartitionedHeap {
Expand Down Expand Up @@ -396,6 +402,7 @@ impl PartitionedHeap {
reclaimed: 0,
bitmap_allocation_pointer: 0,
gc_running: false,
precomputed_heap_size: heap_base,
}
}

Expand Down Expand Up @@ -477,6 +484,7 @@ impl PartitionedHeap {
}

pub unsafe fn start_collection<M: Memory>(&mut self, mem: &mut M, time: &mut BoundedTime) {
self.check_occupied_size();
debug_assert_eq!(self.bitmap_allocation_pointer, 0);
debug_assert!(!self.gc_running);
self.gc_running = true;
Expand Down Expand Up @@ -552,6 +560,7 @@ impl PartitionedHeap {
self.reclaimed += (partition.dynamic_size - marked_size) as u64;
}
if partition.to_be_evacuated() || partition.temporary {
self.precomputed_heap_size -= partition.dynamic_size;
partition.free();
self.free_partitions += 1;
}
Expand All @@ -560,13 +569,18 @@ impl PartitionedHeap {
self.bitmap_allocation_pointer = 0;
debug_assert!(self.gc_running);
self.gc_running = false;
self.check_occupied_size();
}

/// Mirrors the `evacuating` flag.
// NOTE(review): name suggests callers use this to decide whether the GC update
// phase must still run after evacuation — confirm against the GC driver.
pub fn updates_needed(&self) -> bool {
self.evacuating
}

fn allocation_partition(&mut self) -> &mut Partition {
/// Shared borrow of the partition currently targeted by new allocations
/// (selected by `allocation_index`).
fn allocation_partition(&self) -> &Partition {
&self.partitions[self.allocation_index]
}

/// Mutable borrow of the partition currently targeted by new allocations
/// (selected by `allocation_index`).
fn mut_allocation_partition(&mut self) -> &mut Partition {
&mut self.partitions[self.allocation_index]
}

Expand Down Expand Up @@ -599,13 +613,18 @@ impl PartitionedHeap {
rts_trap_with("Cannot grow memory");
}

/// Debug-only sanity check: the cached occupied size reported by `occupied_size()`
/// must equal the size recomputed from scratch over the whole partition table.
/// The recomputation stays inside `debug_assert_eq!` so it is compiled out in
/// release builds.
fn check_occupied_size(&self) {
    debug_assert_eq!(
        self.partitions
            .iter()
            .fold(0usize, |total, partition| {
                total + partition.static_size + partition.dynamic_size
            }),
        self.occupied_size().as_usize()
    );
}

pub fn occupied_size(&self) -> Bytes<u32> {
let occupied_size: usize = self
.partitions
.iter()
.map(|partition| partition.static_size + partition.dynamic_size)
.sum();
Bytes(occupied_size as u32)
Bytes((self.precomputed_heap_size + self.allocation_partition().dynamic_size) as u32)
}

pub fn reclaimed_size(&self) -> Bytes<u64> {
Expand All @@ -623,7 +642,7 @@ impl PartitionedHeap {

unsafe fn allocate_normal_object<M: Memory>(&mut self, mem: &mut M, size: usize) -> Value {
debug_assert!(size <= PARTITION_SIZE);
let mut allocation_partition = self.allocation_partition();
let mut allocation_partition = self.mut_allocation_partition();
debug_assert!(!allocation_partition.free);
let heap_pointer = allocation_partition.dynamic_space_end();
debug_assert!(size <= allocation_partition.end_address());
Expand All @@ -641,6 +660,8 @@ impl PartitionedHeap {
#[cfg(feature = "memory_check")]
self.allocation_partition().clear_free_remainder();

self.precomputed_heap_size += self.allocation_partition().dynamic_size;

let new_partition = self.allocate_free_partition(mem, size);
mem.grow_memory(new_partition.end_address() as u64);
let heap_pointer = new_partition.dynamic_space_end();
Expand Down Expand Up @@ -683,6 +704,7 @@ impl PartitionedHeap {
} else {
partition.dynamic_size = PARTITION_SIZE;
}
self.precomputed_heap_size += partition.dynamic_size;
}
let first_partition = self.mutable_partition(first_index);
Value::from_ptr(first_partition.dynamic_space_start())
Expand Down Expand Up @@ -744,6 +766,7 @@ impl PartitionedHeap {
partition.update = false;
partition.free();
self.reclaimed += size as u64;
self.precomputed_heap_size -= size;
}
}

Expand Down

0 comments on commit 8bc3853

Please sign in to comment.