Skip to content

Ensure log bits are correctly maintained #1169

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 31 commits into master from fix/modbuf-global-gc-bug
Apr 12, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
e8502e3
Only reset unlog bits of objects in modbuf for nursery GCs
k-sareen Jul 17, 2024
1ecc314
Fix typos
k-sareen Jul 29, 2024
907cca5
cargo fmt
k-sareen Jul 29, 2024
1238865
Fix typos
k-sareen Jul 29, 2024
854dea6
Fix typo
k-sareen Jul 29, 2024
2b32380
Address review
k-sareen Jul 31, 2024
0c4cea3
Fix stale unlog bits for LOS and MarkSweepSpace objects
k-sareen Jul 18, 2024
9dec2a3
cargo fmt
k-sareen Nov 29, 2024
bf5d1cb
Fix failing build
k-sareen Nov 29, 2024
e862642
cargo fmt
k-sareen Nov 29, 2024
2884f6d
Merge branch 'master' into fix/modbuf-global-gc-bug
k-sareen Jan 6, 2025
4c81ca3
Merge branch 'master' into fix/modbuf-global-gc-bug
k-sareen Mar 31, 2025
78ffd7e
Merge branch 'master' into fix/modbuf-global-gc-bug
k-sareen Apr 8, 2025
f152bff
WIP: Address comments
k-sareen Feb 13, 2025
59193a9
Cleanup and add more debug assertions
k-sareen Apr 8, 2025
ce9667c
Fix debug assertion
k-sareen Apr 8, 2025
a4dea71
Zero log bits for dead line
k-sareen Apr 8, 2025
fcaca46
Address Yi's comment about in-header log bit
k-sareen Apr 9, 2025
9968150
Fix failing builds
k-sareen Apr 9, 2025
2390cec
Fix failing build
k-sareen Apr 9, 2025
85f915c
Merge branch 'master' into fix/modbuf-global-gc-bug
k-sareen Apr 9, 2025
2e04346
Fix assertion
k-sareen Apr 9, 2025
cb98417
Remove incorrect assertion from StickyImmix
k-sareen Apr 9, 2025
213f77e
Remove assertions
k-sareen Apr 9, 2025
ccd7d8a
Dump memory map for debugging
k-sareen Apr 9, 2025
0fcda6f
Fix bug for GenImmix and remove unused field in `ImmixSpaceArgs`
k-sareen Apr 10, 2025
3ece622
Revert "Zero log bits for dead line"
k-sareen Apr 10, 2025
f603c6b
Remove unused import
k-sareen Apr 10, 2025
aa232d3
Allow dead code for `PrepareBlockState`
k-sareen Apr 10, 2025
0a3cac4
Clear log bits for CopySpace in release
k-sareen Apr 10, 2025
899c83d
Merge branch 'master' into fix/modbuf-global-gc-bug
k-sareen Apr 11, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions src/plan/barriers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -171,13 +171,13 @@ impl<S: BarrierSemantics> ObjectBarrier<S> {
Self { semantics }
}

/// Attepmt to atomically log an object.
/// Check whether `object` is currently in the "unlogged" state, i.e. its
/// unlog bit is non-zero. This is a read-only query; it does not modify the bit.
fn object_is_unlogged(&self, object: ObjectReference) -> bool {
    // Non-atomic load is sufficient here: a stale read only causes a redundant
    // (atomic) logging attempt in `log_object`, never a missed one.
    let unlog_bit = unsafe { S::UNLOG_BIT_SPEC.load::<S::VM, u8>(object, None) };
    unlog_bit != 0
}

/// Attepmt to atomically log an object.
/// Attempt to atomically log an object.
/// Returns true if the object is not logged previously.
fn log_object(&self, object: ObjectReference) -> bool {
#[cfg(all(feature = "vo_bit", feature = "extreme_assertions"))]
Expand Down
34 changes: 18 additions & 16 deletions src/plan/generational/gc_work.rs
Original file line number Diff line number Diff line change
Expand Up @@ -108,22 +108,24 @@ impl<E: ProcessEdgesWork> ProcessModBuf<E> {

impl<E: ProcessEdgesWork> GCWork<E::VM> for ProcessModBuf<E> {
fn do_work(&mut self, worker: &mut GCWorker<E::VM>, mmtk: &'static MMTK<E::VM>) {
// Flip the per-object unlogged bits to "unlogged" state.
for obj in &self.modbuf {
<E::VM as VMBinding>::VMObjectModel::GLOBAL_LOG_BIT_SPEC.store_atomic::<E::VM, u8>(
*obj,
1,
None,
Ordering::SeqCst,
);
}
// scan modbuf only if the current GC is a nursery GC
if mmtk
.get_plan()
.generational()
.unwrap()
.is_current_gc_nursery()
{
// Process and scan modbuf only if the current GC is a nursery GC
let gen = mmtk.get_plan().generational().unwrap();
if gen.is_current_gc_nursery() {
// Flip the per-object unlogged bits to "unlogged" state.
for obj in &self.modbuf {
debug_assert!(
!gen.is_object_in_nursery(*obj),
"{} was logged but is not mature. Dumping process memory maps:\n{}",
*obj,
crate::util::memory::get_process_memory_maps(),
);
<E::VM as VMBinding>::VMObjectModel::GLOBAL_LOG_BIT_SPEC.store_atomic::<E::VM, u8>(
*obj,
1,
None,
Ordering::SeqCst,
);
}
// Scan objects in the modbuf and forward pointers
let modbuf = std::mem::take(&mut self.modbuf);
GCWork::do_work(
Expand Down
1 change: 1 addition & 0 deletions src/plan/generational/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,7 @@ impl<VM: VMBinding> CommonGenPlan<VM> {
if self.common.get_los().in_space(object) {
return self.common.get_los().trace_object::<Q>(queue, object);
}

object
}

Expand Down
6 changes: 2 additions & 4 deletions src/plan/generational/immix/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -249,10 +249,8 @@ impl<VM: VMBinding> GenImmix<VM> {
let immix_space = ImmixSpace::new(
plan_args.get_space_args("immix_mature", true, false, VMRequest::discontiguous()),
ImmixSpaceArgs {
reset_log_bit_in_major_gc: false,
// We don't need to unlog objects at tracing. Instead, we unlog objects at copying.
// Any object is moved into the mature space, or is copied inside the mature space. We will unlog it.
unlog_object_when_traced: false,
// We need to unlog objects at tracing time since we currently clear all log bits during a major GC
unlog_object_when_traced: true,
// In GenImmix, young objects are not allocated in ImmixSpace directly.
#[cfg(feature = "vo_bit")]
mixed_age: false,
Expand Down
1 change: 0 additions & 1 deletion src/plan/immix/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,6 @@ impl<VM: VMBinding> Immix<VM> {
Self::new_with_args(
plan_args,
ImmixSpaceArgs {
reset_log_bit_in_major_gc: false,
unlog_object_when_traced: false,
#[cfg(feature = "vo_bit")]
mixed_age: false,
Expand Down
2 changes: 1 addition & 1 deletion src/plan/marksweep/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,7 @@ impl<VM: VMBinding> Plan for MarkSweep<VM> {

fn prepare(&mut self, tls: VMWorkerThread) {
self.common.prepare(tls, true);
self.ms.prepare();
self.ms.prepare(true);
}

fn release(&mut self, tls: VMWorkerThread) {
Expand Down
4 changes: 1 addition & 3 deletions src/plan/sticky/immix/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -254,6 +254,7 @@ impl<VM: VMBinding> crate::plan::generational::global::GenerationalPlanExt<VM> f
trace!("Immix mature object {}, skip", object);
return object;
} else {
// Nursery object
let object = if KIND == TRACE_KIND_TRANSITIVE_PIN || KIND == TRACE_KIND_FAST {
trace!(
"Immix nursery object {} is being traced without moving",
Expand Down Expand Up @@ -326,9 +327,6 @@ impl<VM: VMBinding> StickyImmix<VM> {
// Every object we trace in full heap GC is a mature object. Thus in both cases,
// they should be unlogged.
unlog_object_when_traced: true,
// In full heap GC, mature objects may die, and their unlogged bit needs to be reset.
// Along with the option above, we unlog them again during tracing.
reset_log_bit_in_major_gc: true,
// In StickyImmix, both young and old objects are allocated in the ImmixSpace.
#[cfg(feature = "vo_bit")]
mixed_age: true,
Expand Down
6 changes: 6 additions & 0 deletions src/policy/copyspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,12 @@ impl<VM: VMBinding> CopySpace<VM> {
side_forwarding_status_table.bzero_metadata(start, size);
}

if self.common.needs_log_bit {
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC {
side.bzero_metadata(start, size);
}
}

// Clear VO bits because all objects in the space are dead.
#[cfg(feature = "vo_bit")]
crate::util::metadata::vo_bit::bzero_vo_bit(start, size);
Expand Down
30 changes: 10 additions & 20 deletions src/policy/immix/immixspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -65,14 +65,6 @@ pub struct ImmixSpaceArgs {
/// (no matter we copy an object or not). So we have to use `PromoteToMature`, and instead
/// just set the log bit in the space when an object is traced.
pub unlog_object_when_traced: bool,
/// Reset log bit at the start of a major GC.
/// Normally we do not need to do this. When immix is used as the mature space,
/// any object should be set as unlogged, and that bit does not need to be cleared
/// even if the object is dead. But in sticky Immix, the mature object and
/// the nursery object are in the same space, we will have to use the
/// bit to differentiate them. So we reset all the log bits in major GCs,
/// and unlogged the objects when they are traced (alive).
pub reset_log_bit_in_major_gc: bool,
/// Whether this ImmixSpace instance contains both young and old objects.
/// This affects the updating of valid-object bits. If some lines or blocks of this ImmixSpace
/// instance contain young objects, their VO bits need to be updated during this GC. Currently
Expand Down Expand Up @@ -293,7 +285,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
Block::LOG_BYTES
);

if space_args.unlog_object_when_traced || space_args.reset_log_bit_in_major_gc {
if space_args.unlog_object_when_traced {
assert!(
args.constraints.needs_log_bit,
"Invalid args when the plan does not use log bit"
Expand Down Expand Up @@ -389,6 +381,14 @@ impl<VM: VMBinding> ImmixSpace<VM> {
unimplemented!("cyclic mark bits is not supported at the moment");
}

if self.common.needs_log_bit {
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC {
for chunk in self.chunk_map.all_chunks() {
side.bzero_metadata(chunk.start(), Chunk::BYTES);
}
}
}

// Prepare defrag info
if super::DEFRAG {
self.defrag.prepare(self, plan_stats);
Expand Down Expand Up @@ -838,6 +838,7 @@ impl<VM: VMBinding> ImmixSpace<VM> {
/// A work packet to prepare each block for a major GC.
/// Performs the action on a range of chunks.
pub struct PrepareBlockState<VM: VMBinding> {
#[allow(dead_code)]
pub space: &'static ImmixSpace<VM>,
pub chunk: Chunk,
pub defrag_threshold: Option<usize>,
Expand All @@ -851,17 +852,6 @@ impl<VM: VMBinding> PrepareBlockState<VM> {
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::LOCAL_MARK_BIT_SPEC {
side.bzero_metadata(self.chunk.start(), Chunk::BYTES);
}
if self.space.space_args.reset_log_bit_in_major_gc {
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC {
// We zero all the log bits in major GC, and for every object we trace, we will mark the log bit again.
side.bzero_metadata(self.chunk.start(), Chunk::BYTES);
} else {
// If the log bit is not in side metadata, we cannot bulk zero. We can either
// clear the bit for dead objects in major GC, or clear the log bit for new
// objects. In either cases, we do not need to set log bit at tracing.
unimplemented!("We cannot bulk zero unlogged bit.")
}
}
}
}

Expand Down
9 changes: 9 additions & 0 deletions src/policy/immortalspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -213,6 +213,15 @@ impl<VM: VMBinding> ImmortalSpace<VM> {
object
);
if self.mark_state.test_and_mark::<VM>(object) {
// Set the unlog bit if required
if self.common.needs_log_bit {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.store_atomic::<VM, u8>(
object,
1,
None,
Ordering::SeqCst,
);
}
queue.enqueue(object);
}
object
Expand Down
8 changes: 7 additions & 1 deletion src/policy/largeobjectspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -269,7 +269,9 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
trace!("LOS object {} is being marked now", object);
self.treadmill.copy(object, nursery_object);
// We just moved the object out of the logical nursery, mark it as unlogged.
if nursery_object && self.common.needs_log_bit {
// We also unlog mature objects as their unlog bit may have been unset before the
// full-heap GC
if self.common.needs_log_bit {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC
.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
}
Expand All @@ -288,6 +290,10 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
let sweep = |object: ObjectReference| {
#[cfg(feature = "vo_bit")]
crate::util::metadata::vo_bit::unset_vo_bit(object);
// Clear log bits for dead objects to prevent a new nursery object allocated at this
// address from inheriting a stale unlog bit
if self.common.needs_log_bit {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.clear::<VM>(object, Ordering::SeqCst);
}
self.pr
.release_pages(get_super_page(object.to_object_start::<VM>()));
};
Expand Down
2 changes: 1 addition & 1 deletion src/policy/marksweepspace/malloc_ms/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -488,7 +488,7 @@ impl<VM: VMBinding> MallocSpace<VM> {
}
}

pub fn prepare(&mut self) {}
pub fn prepare(&mut self, _full_heap: bool) {}

pub fn release(&mut self) {
use crate::scheduler::WorkBucketStage;
Expand Down
10 changes: 9 additions & 1 deletion src/policy/marksweepspace/native_ms/global.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,7 +405,15 @@ impl<VM: VMBinding> MarkSweepSpace<VM> {
self.chunk_map.set(block.chunk(), ChunkState::Allocated);
}

pub fn prepare(&mut self) {
pub fn prepare(&mut self, full_heap: bool) {
if self.common.needs_log_bit && full_heap {
if let MetadataSpec::OnSide(side) = *VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC {
for chunk in self.chunk_map.all_chunks() {
side.bzero_metadata(chunk.start(), Chunk::BYTES);
}
}
}

#[cfg(debug_assertions)]
self.abandoned_in_gc.lock().unwrap().assert_empty();

Expand Down
11 changes: 11 additions & 0 deletions src/policy/vmspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -283,6 +283,17 @@ impl<VM: VMBinding> VMSpace<VM> {
);
debug_assert!(self.in_space(object));
if self.mark_state.test_and_mark::<VM>(object) {
// Flip the per-object unlogged bits to "unlogged" state for objects inside the
// bootimage
#[cfg(feature = "set_unlog_bits_vm_space")]
Copy link
Collaborator Author

@k-sareen k-sareen Jul 29, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Perhaps we can remove the conditional compile feature here since it is likely objects in the VM space will want to have the unlog bit set and the feature was more for initialization

if self.common.needs_log_bit {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.store_atomic::<VM, u8>(
object,
1,
None,
Ordering::SeqCst,
);
}
queue.enqueue(object);
}
object
Expand Down
5 changes: 5 additions & 0 deletions src/util/metadata/log_bit.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,11 @@ use std::sync::atomic::Ordering;
use super::MetadataSpec;

impl VMGlobalLogBitSpec {
/// Clear the unlog bit, returning `object` to the "logged" state
/// (a bit value of 0 means logged; 1 means unlogged).
pub fn clear<VM: VMBinding>(&self, object: ObjectReference, order: Ordering) {
    // Value stored into the per-object log bit to represent "logged".
    const LOGGED: u8 = 0;
    self.store_atomic::<VM, u8>(object, LOGGED, None, order)
}

/// Mark the log bit as unlogged (1 means unlogged)
pub fn mark_as_unlogged<VM: VMBinding>(&self, object: ObjectReference, order: Ordering) {
self.store_atomic::<VM, u8>(object, 1, None, order)
Expand Down
Loading