Skip to content
8 changes: 4 additions & 4 deletions src/policy/compressor/compressorspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -405,11 +405,11 @@ impl<VM: VMBinding> CompressorSpace<VM> {
pub fn after_compact(&self, worker: &mut GCWorker<VM>, los: &LargeObjectSpace<VM>) {
self.pr.reset_allocator();
// Update references from the LOS to Compressor too.
los.enumerate_objects(&mut object_enum::ClosureObjectEnumerator::<_, VM>::new(
&mut |o: ObjectReference| {
los.enumerate_objects_for_forwarding(
&mut object_enum::ClosureObjectEnumerator::<_, VM>::new(&mut |o: ObjectReference| {
self.update_references(worker, o);
},
));
}),
);
}
}

Expand Down
46 changes: 39 additions & 7 deletions src/policy/largeobjectspace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -219,21 +219,39 @@ impl<VM: VMBinding> Space<VM> for LargeObjectSpace<VM> {
}

fn enumerate_objects(&self, enumerator: &mut dyn ObjectEnumerator) {
self.treadmill.enumerate_objects(enumerator);
// `MMTK::enumerate_objects` is not allowed during GC, so the collection nursery and the
// from space must be empty. In `ConcurrentImmix`, mutators may run during GC and call
// `MMTK::enumerate_objects`. It has undefined behavior according to the current API, so
// the assertion failure is expected.
assert!(
self.treadmill.is_collect_nursery_empty(),
"Collection nursery is not empty"
);
assert!(
self.treadmill.is_from_space_empty(),
"From-space is not empty"
);

// Visit objects in the allocation nursery and the to-space, which contain young and old
// objects, respectively, during mutator time.
self.treadmill.enumerate_objects(enumerator, false);
}

fn clear_side_log_bits(&self) {
let mut enumator = ClosureObjectEnumerator::<_, VM>::new(|object| {
let mut enumerator = ClosureObjectEnumerator::<_, VM>::new(|object| {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.clear::<VM>(object, Ordering::SeqCst);
});
self.treadmill.enumerate_objects(&mut enumator);
// Visit all objects. It can be ordered arbitrarily with `Self::Release` which sweeps dead
// objects (removing them from the treadmill) and clears their unlog bits, too.
self.treadmill.enumerate_objects(&mut enumerator, true);
}

fn set_side_log_bits(&self) {
let mut enumator = ClosureObjectEnumerator::<_, VM>::new(|object| {
let mut enumerator = ClosureObjectEnumerator::<_, VM>::new(|object| {
VM::VMObjectModel::GLOBAL_LOG_BIT_SPEC.mark_as_unlogged::<VM>(object, Ordering::SeqCst);
});
self.treadmill.enumerate_objects(&mut enumator);
// Visit all objects.
self.treadmill.enumerate_objects(&mut enumerator, true);
}
}

Expand Down Expand Up @@ -297,10 +315,16 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
}

pub fn release(&mut self, full_heap: bool) {
// We swapped the allocation nursery and the collection nursery when GC starts, and we don't
// add objects to the allocation nursery during GC. It should have remained empty during
// the whole GC.
debug_assert!(self.treadmill.is_alloc_nursery_empty());

self.sweep_large_pages(true);
debug_assert!(self.treadmill.is_nursery_empty());
debug_assert!(self.treadmill.is_collect_nursery_empty());
if full_heap {
self.sweep_large_pages(false);
debug_assert!(self.treadmill.is_from_space_empty());
}
}

Expand Down Expand Up @@ -364,12 +388,20 @@ impl<VM: VMBinding> LargeObjectSpace<VM> {
sweep(object);
}
} else {
for object in self.treadmill.collect() {
for object in self.treadmill.collect_mature() {
sweep(object)
}
}
}

/// Enumerate objects for forwarding references. Used by certain mark-compact plans.
pub(crate) fn enumerate_objects_for_forwarding(&self, enumerator: &mut dyn ObjectEnumerator) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I suggest avoiding the term 'forwarding' here. The LOS does not know what forwarding means -- it depends on the plan, e.g. when the function will be called, what objects it should list, etc.

It should be the plan that instructs the LOS to iterate over the objects in the to-space and to assert that there are no nursery objects (it is a hack that the compressor space currently does this instead of the plan).

// The alloc nursery should have remained empty during the GC.
debug_assert!(self.treadmill.is_alloc_nursery_empty());
// We only need to visit the to_space, which contains all objects determined to be live.
self.treadmill.enumerate_objects(enumerator, false);
}

/// Allocate an object
pub fn allocate_pages(
&self,
Expand Down
139 changes: 94 additions & 45 deletions src/util/treadmill.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,111 +6,160 @@ use crate::util::ObjectReference;

use super::object_enum::ObjectEnumerator;

/// A data structure for recording objects in the LOS.
///
/// All operations are protected by a single mutex [`TreadMill::sync`].
pub struct TreadMill {
from_space: Mutex<HashSet<ObjectReference>>,
to_space: Mutex<HashSet<ObjectReference>>,
collect_nursery: Mutex<HashSet<ObjectReference>>,
alloc_nursery: Mutex<HashSet<ObjectReference>>,
sync: Mutex<TreadMillSync>,
}

/// The synchronized part of [`TreadMill`]
#[derive(Default)]
struct TreadMillSync {
/// The from-space. During GC, it contains old objects with unknown liveness.
from_space: HashSet<ObjectReference>,
/// The to-space. During mutator time, it contains old objects; during GC, it contains objects
/// determined to be live.
to_space: HashSet<ObjectReference>,
/// The collection nursery. During GC, it contains young objects with unknown liveness.
collect_nursery: HashSet<ObjectReference>,
/// The allocation nursery. During mutator time, it contains young objects; during GC, it
/// remains empty.
alloc_nursery: HashSet<ObjectReference>,
}

impl std::fmt::Debug for TreadMill {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let sync = self.sync.lock().unwrap();
f.debug_struct("TreadMill")
.field("from", &self.from_space.lock().unwrap())
.field("to", &self.to_space.lock().unwrap())
.field("collect_nursery", &self.collect_nursery.lock().unwrap())
.field("alloc_nursery", &self.alloc_nursery.lock().unwrap())
.field("from_space", &sync.from_space)
.field("to_space", &sync.to_space)
.field("collect_nursery", &sync.collect_nursery)
.field("alloc_nursery", &sync.alloc_nursery)
.finish()
}
}

impl TreadMill {
pub fn new() -> Self {
TreadMill {
from_space: Mutex::new(HashSet::new()),
to_space: Mutex::new(HashSet::new()),
collect_nursery: Mutex::new(HashSet::new()),
alloc_nursery: Mutex::new(HashSet::new()),
sync: Mutex::new(Default::default()),
}
}

/// Add an object to the treadmill.
///
/// New objects are normally added to `alloc_nursery`. But when allocating as live (e.g. when
/// concurrent marking is active), we directly add into the `to_space`.
pub fn add_to_treadmill(&self, object: ObjectReference, nursery: bool) {
let mut sync = self.sync.lock().unwrap();
if nursery {
trace!("Adding {} to nursery", object);
self.alloc_nursery.lock().unwrap().insert(object);
trace!("Adding {} to alloc_nursery", object);
sync.alloc_nursery.insert(object);
} else {
trace!("Adding {} to to_space", object);
self.to_space.lock().unwrap().insert(object);
sync.to_space.insert(object);
}
}

pub fn collect_nursery(&self) -> Vec<ObjectReference> {
let mut guard = self.collect_nursery.lock().unwrap();
let vals = guard.iter().copied().collect();
guard.clear();
drop(guard);
vals
/// Drain the collection nursery, returning every object it held.
///
/// Called during sweeping, at which point all unreachable young objects sit in the collection
/// nursery.
pub fn collect_nursery(&self) -> impl IntoIterator<Item = ObjectReference> {
    // Swap the whole set out under the lock; the caller can then iterate it without holding
    // the treadmill mutex.
    std::mem::take(&mut self.sync.lock().unwrap().collect_nursery)
}

pub fn collect(&self) -> Vec<ObjectReference> {
let mut guard = self.from_space.lock().unwrap();
let vals = guard.iter().copied().collect();
guard.clear();
drop(guard);
vals
/// Drain the from-space, returning every object it held.
///
/// Called during sweeping, at which point all unreachable old objects sit in the from-space.
pub fn collect_mature(&self) -> impl IntoIterator<Item = ObjectReference> {
    // Swap the whole set out under the lock; the caller can then iterate it without holding
    // the treadmill mutex.
    std::mem::take(&mut self.sync.lock().unwrap().from_space)
}

/// Move an object to `to_space`. Called when an object is determined to be reachable.
pub fn copy(&self, object: ObjectReference, is_in_nursery: bool) {
let mut sync = self.sync.lock().unwrap();
if is_in_nursery {
let mut guard = self.collect_nursery.lock().unwrap();
debug_assert!(
guard.contains(&object),
sync.collect_nursery.contains(&object),
"copy source object ({}) must be in collect_nursery",
object
);
guard.remove(&object);
sync.collect_nursery.remove(&object);
} else {
let mut guard = self.from_space.lock().unwrap();
debug_assert!(
guard.contains(&object),
sync.from_space.contains(&object),
"copy source object ({}) must be in from_space",
object
);
guard.remove(&object);
sync.from_space.remove(&object);
}
self.to_space.lock().unwrap().insert(object);
sync.to_space.insert(object);
}

/// Return true if the to-space is empty.
pub fn is_to_space_empty(&self) -> bool {
self.to_space.lock().unwrap().is_empty()
let sync = self.sync.lock().unwrap();
sync.to_space.is_empty()
}

/// Return true if the from-space is empty.
pub fn is_from_space_empty(&self) -> bool {
self.from_space.lock().unwrap().is_empty()
let sync = self.sync.lock().unwrap();
sync.from_space.is_empty()
}

/// Query whether the allocation nursery currently holds no objects.
pub fn is_alloc_nursery_empty(&self) -> bool {
    self.sync.lock().unwrap().alloc_nursery.is_empty()
}

pub fn is_nursery_empty(&self) -> bool {
self.collect_nursery.lock().unwrap().is_empty()
/// Query whether the collection nursery currently holds no objects.
pub fn is_collect_nursery_empty(&self) -> bool {
    self.sync.lock().unwrap().collect_nursery.is_empty()
}

/// Flip object sets.
///
/// It will flip the allocation nursery and the collection nursery.
///
/// If `full_heap` is true, it will also flip the from-space and the to-space.
pub fn flip(&mut self, full_heap: bool) {
swap(&mut self.alloc_nursery, &mut self.collect_nursery);
let sync = self.sync.get_mut().unwrap();
swap(&mut sync.alloc_nursery, &mut sync.collect_nursery);
trace!("Flipped alloc_nursery and collect_nursery");
if full_heap {
swap(&mut self.from_space, &mut self.to_space);
swap(&mut sync.from_space, &mut sync.to_space);
trace!("Flipped from_space and to_space");
}
}

pub(crate) fn enumerate_objects(&self, enumerator: &mut dyn ObjectEnumerator) {
let mut visit_objects = |set: &Mutex<HashSet<ObjectReference>>| {
let set = set.lock().unwrap();
/// Enumerate objects.
///
/// Objects in the allocation nursery and the to-spaces are always enumerated. They include all
/// objects during mutator time, and objects determined to be live during a GC.
///
/// If `all` is true, it will enumerate the collection nursery and the from-space, too.
pub(crate) fn enumerate_objects(&self, enumerator: &mut dyn ObjectEnumerator, all: bool) {
let sync = self.sync.lock().unwrap();
let mut enumerated = 0usize;
let mut visit_objects = |set: &HashSet<ObjectReference>| {
for object in set.iter() {
enumerator.visit_object(*object);
enumerated += 1;
}
};
visit_objects(&self.alloc_nursery);
visit_objects(&self.to_space);
visit_objects(&sync.alloc_nursery);
visit_objects(&sync.to_space);

if all {
visit_objects(&sync.collect_nursery);
visit_objects(&sync.from_space);
}

debug!("Enumerated {enumerated} objects in LOS. all: {all}");
}
}

Expand Down
Loading