swiper: pass min_size/max_size for allocations
dinfuehr committed Jan 5, 2024
1 parent 4bbcc21 · commit fe2ecbc
Showing 10 changed files with 129 additions and 137 deletions.
8 changes: 4 additions & 4 deletions dora-runtime/src/compiler/asm.rs
@@ -7,7 +7,7 @@ use crate::cpu::{
FReg, Reg, FREG_RESULT, REG_PARAMS, REG_RESULT, REG_SP, REG_THREAD, REG_TMP1, REG_TMP2,
STACK_FRAME_ALIGNMENT,
};
use crate::gc::tlab::TLAB_OBJECT_SIZE;
use crate::gc::tlab::MAX_TLAB_OBJECT_SIZE;
use crate::gc::Address;
use crate::masm::{CondCode, Label, MacroAssembler, Mem, ScratchReg};
use crate::mode::MachineMode;
@@ -822,12 +822,12 @@ impl<'a> BaselineAssembler<'a> {
match size {
AllocationSize::Dynamic(reg_size) => {
self.masm
.cmp_reg_imm(MachineMode::Ptr, reg_size, TLAB_OBJECT_SIZE as i32);
.cmp_reg_imm(MachineMode::Ptr, reg_size, MAX_TLAB_OBJECT_SIZE as i32);
self.masm.jump_if(CondCode::GreaterEq, lbl_slow_path);
}

AllocationSize::Fixed(size) => {
assert!(size < TLAB_OBJECT_SIZE);
assert!(size < MAX_TLAB_OBJECT_SIZE);
}
}

@@ -978,7 +978,7 @@ impl<'a> BaselineAssembler<'a> {

match size {
AllocationSize::Fixed(fixed_size) => {
if fixed_size < TLAB_OBJECT_SIZE {
if fixed_size < MAX_TLAB_OBJECT_SIZE {
self.tlab_allocate(dest, size, location, gcpoint);
} else {
self.gc_allocate(dest, size, location, gcpoint);
4 changes: 2 additions & 2 deletions dora-runtime/src/gc.rs
@@ -12,7 +12,7 @@ use crate::gc::copy::CopyCollector;
use crate::gc::space::{Space, SpaceConfig};
use crate::gc::sweep::SweepCollector;
use crate::gc::swiper::{Swiper, CARD_SIZE};
use crate::gc::tlab::TLAB_OBJECT_SIZE;
use crate::gc::tlab::MAX_TLAB_OBJECT_SIZE;
use crate::gc::zero::ZeroCollector;
use crate::mem;
use crate::object::{Header, Obj};
@@ -127,7 +127,7 @@ impl Gc {
self.collect(vm, GcReason::Stress);
}

if size < TLAB_OBJECT_SIZE && self.supports_tlab {
if size < MAX_TLAB_OBJECT_SIZE && self.supports_tlab {
self.alloc_tlab(vm, size)
} else {
self.collector.alloc(vm, size)
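For orientation, the check above is the first of the size tiers this commit touches: objects below MAX_TLAB_OBJECT_SIZE are served from a thread-local allocation buffer, larger ones go straight to the collector, and in the swiper.rs hunks further down anything of at least LARGE_OBJECT_SIZE (now 32 * K) takes the large-object path (alloc_large). The sketch below restates that classification in isolation; the MAX_TLAB_OBJECT_SIZE value is a made-up placeholder, not the constant defined in gc/tlab.rs.

// Size-tier sketch assembled from the call sites in this commit. The
// MAX_TLAB_OBJECT_SIZE value below is a placeholder assumption; only
// LARGE_OBJECT_SIZE (32 * K) comes from the swiper.rs hunk further down.
const K: usize = 1024;
const MAX_TLAB_OBJECT_SIZE: usize = 8 * K; // assumed value for illustration
const LARGE_OBJECT_SIZE: usize = 32 * K;

#[derive(Debug, PartialEq)]
enum AllocPath {
    Tlab,   // bump allocation inside a thread-local buffer
    Normal, // shared young-generation allocation (alloc_normal)
    Large,  // dedicated large-object allocation (alloc_large)
}

fn classify(size: usize, supports_tlab: bool) -> AllocPath {
    if size < MAX_TLAB_OBJECT_SIZE && supports_tlab {
        AllocPath::Tlab
    } else if size < LARGE_OBJECT_SIZE {
        AllocPath::Normal
    } else {
        AllocPath::Large
    }
}

fn main() {
    assert_eq!(classify(64, true), AllocPath::Tlab);
    assert_eq!(classify(16 * K, true), AllocPath::Normal);
    assert_eq!(classify(64 * K, true), AllocPath::Large);
}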
2 changes: 1 addition & 1 deletion dora-runtime/src/gc/allocator.rs
@@ -2,6 +2,6 @@ use crate::gc::{Address, Region};
use crate::vm::VM;

pub trait GenerationAllocator {
fn allocate(&self, vm: &VM, size: usize) -> Option<Address>;
fn allocate(&self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address>;
fn free(&self, region: Region);
}
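The one-line trait change above is the core of the commit: callers now state both the smallest and the largest region they can use, and the fixed-size call sites in this diff become allocate(vm, size, size). Below is a minimal sketch of an allocator honoring such a (min_size, max_size) request with a plain bump pointer; the types, field names, and the granting policy are assumptions for illustration (the vm parameter and the runtime's interior mutability are omitted), not the runtime's actual implementation.

// Minimal bump-pointer sketch of a (min_size, max_size) allocation request.
// Address, BumpRegion, and the granting policy are illustrative assumptions.
type Address = usize;

trait GenerationAllocator {
    /// Reserve at least `min_size` bytes, up to `max_size` if room is left.
    fn allocate(&mut self, min_size: usize, max_size: usize) -> Option<Address>;
}

struct BumpRegion {
    top: Address,
    limit: Address,
}

impl GenerationAllocator for BumpRegion {
    fn allocate(&mut self, min_size: usize, max_size: usize) -> Option<Address> {
        debug_assert!(min_size <= max_size);
        let free = self.limit - self.top;
        // Prefer the full request, but accept anything down to min_size so the
        // tail of a nearly full region can still be handed out instead of wasted.
        let granted = if free >= max_size {
            max_size
        } else if free >= min_size {
            free
        } else {
            return None; // caller must collect or move on to a fresh page
        };
        let start = self.top;
        self.top += granted;
        Some(start)
    }
}

fn main() {
    let mut region = BumpRegion { top: 0, limit: 4096 };
    // A fixed-size allocation is simply min_size == max_size, as in the
    // rewritten call sites (e.g. allocate(vm, size, size)).
    assert_eq!(region.allocate(64, 64), Some(0));
    // A flexible request (e.g. a LAB refill) can be satisfied partially.
    assert_eq!(region.allocate(1024, 8192), Some(64));
}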
71 changes: 63 additions & 8 deletions dora-runtime/src/gc/swiper.rs
@@ -4,9 +4,9 @@ use std::fmt;
use std::mem::size_of;
use std::sync::Arc;

pub use crate::gc::swiper::old::Page;
pub use crate::gc::swiper::young::YoungAlloc;

use crate::gc::allocator::GenerationAllocator;
use crate::gc::root::{determine_strong_roots, Slot};
use crate::gc::swiper::card::CardTable;
use crate::gc::swiper::controller::{HeapController, SharedHeapConfig};
@@ -45,11 +45,11 @@ const YOUNG_RATIO: usize = 2;
// heap is divided into cards of size CARD_SIZE.
// card entry determines whether this part of the heap was modified
// in minor collections those parts of the heap need to be analyzed
pub const CARD_SIZE: usize = 512;
pub const CARD_SIZE_BITS: usize = 9;
pub const CARD_SIZE: usize = 1 << CARD_SIZE_BITS;
pub const CARD_REFS: usize = CARD_SIZE / size_of::<usize>();

pub const LARGE_OBJECT_SIZE: usize = 64 * K;
pub const LARGE_OBJECT_SIZE: usize = 32 * K;
pub const PAGE_SIZE: usize = 128 * K;
pub const PAGE_HEADER_SIZE: usize = 64 * K;

@@ -380,13 +380,15 @@ impl Swiper {
}

fn alloc_normal(&self, vm: &VM, size: usize) -> Address {
if let Some(address) = self.young.bump_alloc(vm, size) {
if let Some(address) = self.young.allocate(vm, size, size) {
return address;
}

self.perform_collection_and_choose(vm, GcReason::AllocationFailure);

self.young.bump_alloc(vm, size).unwrap_or(Address::null())
self.young
.allocate(vm, size, size)
.unwrap_or(Address::null())
}

fn alloc_large(&self, vm: &VM, size: usize) -> Address {
@@ -406,19 +408,19 @@ impl Collector for Swiper {
}

fn alloc_tlab_area(&self, vm: &VM, size: usize) -> Option<Region> {
if let Some(address) = self.young.bump_alloc(vm, size) {
if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}

self.perform_collection_and_choose(vm, GcReason::AllocationFailure);

if let Some(address) = self.young.bump_alloc(vm, size) {
if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}

self.perform_collection(vm, CollectionKind::Full, GcReason::AllocationFailure);

if let Some(address) = self.young.bump_alloc(vm, size) {
if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}

@@ -635,3 +637,56 @@ fn forward_minor(object: Address, young: Region) -> Option<Address> {
Some(object)
}
}

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Page(Address);

impl Page {
pub fn new(start: Address) -> Page {
Page(start)
}

pub fn from_address(value: Address) -> Page {
let page_start = value.to_usize() & !(PAGE_SIZE - 1);
Page::new(page_start.into())
}

pub fn initialize_header(&self) {
unsafe {
let header = std::slice::from_raw_parts_mut(
self.start().to_mut_ptr::<usize>(),
PAGE_HEADER_SIZE / mem::ptr_width_usize(),
);

header.fill(0xDEAD2BAD);
}
}

pub fn area(&self) -> Region {
Region::new(self.start(), self.end())
}

pub fn start(&self) -> Address {
self.0
}

pub fn end(&self) -> Address {
self.start().offset(PAGE_SIZE)
}

pub fn size(&self) -> usize {
PAGE_SIZE
}

pub fn object_area(&self) -> Region {
Region::new(self.object_area_start(), self.object_area_end())
}

pub fn object_area_start(&self) -> Address {
self.start().offset(PAGE_HEADER_SIZE)
}

pub fn object_area_end(&self) -> Address {
self.end()
}
}
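The new Page type above is just a PAGE_SIZE-aligned start address plus a few derived offsets, with from_address masking off the low bits of an arbitrary pointer. The stand-alone sketch below shows the same mask arithmetic on plain integers; PAGE_SIZE and PAGE_HEADER_SIZE mirror the constants in the hunk above, while the sample address is arbitrary.

// Page-boundary arithmetic as used by Page::from_address and object_area_start,
// shown on plain integers. The constants mirror the swiper.rs hunk above.
const K: usize = 1024;
const PAGE_SIZE: usize = 128 * K;
const PAGE_HEADER_SIZE: usize = 64 * K;

fn page_start(addr: usize) -> usize {
    // PAGE_SIZE is a power of two, so clearing its low bits rounds the
    // address down to the page it lives on.
    addr & !(PAGE_SIZE - 1)
}

fn object_area_start(addr: usize) -> usize {
    // Objects start right after the per-page header.
    page_start(addr) + PAGE_HEADER_SIZE
}

fn main() {
    let addr = 0x7f00_1234_usize;
    assert_eq!(page_start(addr) % PAGE_SIZE, 0);
    assert_eq!(object_area_start(addr) - page_start(addr), PAGE_HEADER_SIZE);
}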
5 changes: 3 additions & 2 deletions dora-runtime/src/gc/swiper/full.rs
@@ -8,8 +8,9 @@ use crate::gc::swiper::card::CardTable;
use crate::gc::swiper::controller::FullCollectorPhases;
use crate::gc::swiper::crossing::CrossingMap;
use crate::gc::swiper::large::LargeSpace;
use crate::gc::swiper::old::{OldGen, OldGenProtected, Page};
use crate::gc::swiper::old::{OldGen, OldGenProtected};
use crate::gc::swiper::young::YoungGen;
use crate::gc::swiper::Page;
use crate::gc::swiper::{walk_region, INITIAL_METADATA_OLD};
use crate::gc::{fill_region_with, iterate_strong_roots, iterate_weak_roots, marking, Slot};
use crate::gc::{Address, GcReason, Region};
@@ -360,7 +361,7 @@ impl<'a> FullCollector<'a> {
return;
}

if let Some(new_address) = self.old_protected.allocate(self.vm, self.old, size) {
if let Some(new_address) = self.old_protected.allocate(self.vm, self.old, size, size) {
let object_end = new_address.offset(size);

object.copy_to(new_address, size);
58 changes: 22 additions & 36 deletions dora-runtime/src/gc/swiper/minor.rs
@@ -9,12 +9,12 @@ use crate::gc::swiper::card::{CardEntry, CardTable};
use crate::gc::swiper::controller::{MinorCollectorPhases, SharedHeapConfig};
use crate::gc::swiper::crossing::{CrossingEntry, CrossingMap};
use crate::gc::swiper::large::{LargeAlloc, LargeSpace};
use crate::gc::swiper::old::{OldGen, Page};
use crate::gc::swiper::old::OldGen;
use crate::gc::swiper::young::YoungGen;
use crate::gc::swiper::{
forward_minor, CardIdx, YoungAlloc, CARD_SIZE, INITIAL_METADATA_OLD, LARGE_OBJECT_SIZE,
forward_minor, CardIdx, Page, YoungAlloc, CARD_SIZE, INITIAL_METADATA_OLD, LARGE_OBJECT_SIZE,
};
use crate::gc::tlab::{TLAB_OBJECT_SIZE, TLAB_SIZE};
use crate::gc::tlab::{MAX_TLAB_OBJECT_SIZE, MAX_TLAB_SIZE, MIN_TLAB_SIZE};
use crate::gc::{
fill_region, fill_region_with, iterate_weak_roots, Address, GcReason, GenerationAllocator,
Region,
@@ -247,7 +247,6 @@ impl<'a> MinorCollector<'a> {

young_lab: Lab::new(),
young_alloc,
copy_failed: false,

timer: prot_timer,
};
@@ -332,12 +331,13 @@ impl Lab {

fn undo_alloc(&mut self, size: usize) {
self.top = (self.top.to_usize() - size).into();
debug_assert!(self.limit.offset_from(self.top) <= CLAB_SIZE);
debug_assert!(self.limit.offset_from(self.top) <= MAX_LAB_SIZE);
}
}

const CLAB_SIZE: usize = TLAB_SIZE;
const LAB_OBJECT_SIZE: usize = TLAB_OBJECT_SIZE;
const MIN_LAB_SIZE: usize = MIN_TLAB_SIZE;
const MAX_LAB_SIZE: usize = MAX_TLAB_SIZE;
const MAX_LAB_OBJECT_SIZE: usize = MAX_TLAB_OBJECT_SIZE;

const LOCAL_MAXIMUM: usize = 64;

@@ -376,7 +376,6 @@ struct CopyTask<'a> {

young_lab: Lab,
young_alloc: &'a YoungAlloc,
copy_failed: bool,

timer: &'a Option<Mutex<(Timer, f32)>>,
}
@@ -691,23 +690,21 @@ impl<'a> CopyTask<'a> {
}

fn alloc_young(&mut self, size: usize) -> Address {
if size < LAB_OBJECT_SIZE {
if size < MAX_LAB_OBJECT_SIZE {
self.alloc_young_small(size)
} else {
self.alloc_young_medium(size)
}
}

fn alloc_young_small(&mut self, size: usize) -> Address {
debug_assert!(size < LAB_OBJECT_SIZE);
debug_assert!(size < MAX_LAB_OBJECT_SIZE);

if let Some(object_start) = self.young_lab.allocate(size) {
return object_start;
} else if self.copy_failed {
return Address::null();
}

debug_assert!(size <= CLAB_SIZE);
debug_assert!(size <= MAX_LAB_SIZE);
self.young_lab.make_iterable_young(self.vm);
if !self.alloc_young_lab() {
return Address::null();
@@ -717,39 +714,28 @@ }
}

fn alloc_young_medium(&mut self, size: usize) -> Address {
debug_assert!(LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);

if self.copy_failed {
return Address::null();
}
debug_assert!(MAX_LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);

if let Some(result) = self.young_alloc.alloc(self.vm, size) {
if let Some(result) = self.young_alloc.alloc(self.vm, size, size) {
result
} else {
self.copy_failed = true;

Address::null()
}
}

fn alloc_young_lab(&mut self) -> bool {
if self.copy_failed {
return false;
}

if let Some(lab_start) = self.young_alloc.alloc(self.vm, CLAB_SIZE) {
let lab_end = lab_start.offset(CLAB_SIZE);
if let Some(lab_start) = self.young_alloc.alloc(self.vm, MIN_LAB_SIZE, MAX_LAB_SIZE) {
let lab_end = lab_start.offset(MAX_LAB_SIZE);
self.young_lab.reset(lab_start, lab_end);
true
} else {
self.copy_failed = true;
self.young_lab.reset(Address::null(), Address::null());
false
}
}

fn undo_alloc_young(&mut self, copy_addr: Address, size: usize) {
if size < LAB_OBJECT_SIZE {
if size < MAX_LAB_OBJECT_SIZE {
self.young_lab.undo_alloc(size)
} else {
// Can't undo mid-sized objects. Need to make the heap iterable.
Expand All @@ -758,15 +744,15 @@ impl<'a> CopyTask<'a> {
}

fn alloc_old(&mut self, size: usize) -> Address {
if size < LAB_OBJECT_SIZE {
if size < MAX_LAB_OBJECT_SIZE {
self.alloc_old_small(size)
} else {
self.alloc_old_medium(size)
}
}

fn alloc_old_small(&mut self, size: usize) -> Address {
debug_assert!(size < LAB_OBJECT_SIZE);
debug_assert!(size < MAX_LAB_OBJECT_SIZE);
let object_start = self.alloc_object_in_old_lab(size);

if let Some(object_start) = object_start {
@@ -783,9 +769,9 @@ }
}

fn alloc_old_medium(&mut self, size: usize) -> Address {
debug_assert!(LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);
debug_assert!(MAX_LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);

if let Some(object_start) = self.old.allocate(self.vm, size) {
if let Some(object_start) = self.old.allocate(self.vm, size, size) {
let old = object_start;
let new = old.offset(size);
self.old.update_crossing(old, new);
@@ -796,7 +782,7 @@ }
}

fn undo_alloc_old(&mut self, copy_addr: Address, size: usize) {
if size < LAB_OBJECT_SIZE {
if size < MAX_LAB_OBJECT_SIZE {
self.old_lab.undo_alloc(size);
} else {
// Can't undo mid-sized objects. Need to make the heap iterable.
@@ -805,8 +791,8 @@ }
}

fn alloc_old_lab(&mut self) -> bool {
if let Some(lab_start) = self.old.allocate(self.vm, CLAB_SIZE) {
let lab_end = lab_start.offset(CLAB_SIZE);
if let Some(lab_start) = self.old.allocate(self.vm, MIN_LAB_SIZE, MAX_LAB_SIZE) {
let lab_end = lab_start.offset(MAX_LAB_SIZE);
self.old_lab.reset(lab_start, lab_end);

true
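The minor-collector changes above apply the same idea to the copy LABs: a refill now asks the shared allocator for anything between MIN_LAB_SIZE and MAX_LAB_SIZE instead of a fixed CLAB_SIZE, and the separate copy_failed flag goes away because a failed refill simply resets the LAB to an empty region. The sketch below shows that refill-then-bump pattern in isolation; the struct, the constant values, and the refill callback that reports its granted size are assumptions for the example, not the runtime's definitions.

// Refill-then-bump sketch of a copy LAB (local allocation buffer). All names
// and sizes here are assumptions for illustration; the refill callback stands
// in for a GenerationAllocator::allocate(vm, MIN_LAB_SIZE, MAX_LAB_SIZE) call
// and reports back how much it actually granted.
const MIN_LAB_SIZE: usize = 8 * 1024; // assumed
const MAX_LAB_SIZE: usize = 32 * 1024; // assumed

#[derive(Default)]
struct Lab {
    top: usize,
    limit: usize,
}

impl Lab {
    fn allocate(&mut self, size: usize) -> Option<usize> {
        if self.limit - self.top >= size {
            let addr = self.top;
            self.top += size;
            Some(addr)
        } else {
            None
        }
    }

    fn undo_alloc(&mut self, size: usize) {
        // Roll back the most recent bump, e.g. when another thread won the
        // race to copy the same object.
        self.top -= size;
        debug_assert!(self.limit - self.top <= MAX_LAB_SIZE);
    }

    fn reset(&mut self, start: usize, end: usize) {
        self.top = start;
        self.limit = end;
    }
}

fn alloc_with_refill<F>(lab: &mut Lab, size: usize, refill: F) -> Option<usize>
where
    F: Fn(usize, usize) -> Option<(usize, usize)>, // returns (start, granted bytes)
{
    if let Some(addr) = lab.allocate(size) {
        return Some(addr); // fast path: the current LAB still has room
    }
    // Slow path: get a fresh LAB of at least MIN_LAB_SIZE, at most MAX_LAB_SIZE.
    match refill(MIN_LAB_SIZE, MAX_LAB_SIZE) {
        Some((start, granted)) => {
            lab.reset(start, start + granted);
            lab.allocate(size)
        }
        None => {
            // No memory left: leave the LAB empty; the caller falls back to
            // promotion or reports the failure.
            lab.reset(0, 0);
            None
        }
    }
}

fn main() {
    let mut lab = Lab::default();
    // Pretend the shared allocator always grants the full MAX_LAB_SIZE at 0x1000.
    let granted = alloc_with_refill(&mut lab, 128, |_min, max| Some((0x1000, max)));
    assert_eq!(granted, Some(0x1000));
    lab.undo_alloc(128);
    assert_eq!(lab.allocate(128), Some(0x1000));
}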
[Diffs for the remaining changed files were not loaded.]
