diff --git a/dora-runtime/src/compiler/asm.rs b/dora-runtime/src/compiler/asm.rs
index 6bc799b19..3e0f66634 100644
--- a/dora-runtime/src/compiler/asm.rs
+++ b/dora-runtime/src/compiler/asm.rs
@@ -7,7 +7,7 @@ use crate::cpu::{
FReg, Reg, FREG_RESULT, REG_PARAMS, REG_RESULT, REG_SP, REG_THREAD, REG_TMP1, REG_TMP2,
STACK_FRAME_ALIGNMENT,
};
-use crate::gc::tlab::TLAB_OBJECT_SIZE;
+use crate::gc::tlab::MAX_TLAB_OBJECT_SIZE;
use crate::gc::Address;
use crate::masm::{CondCode, Label, MacroAssembler, Mem, ScratchReg};
use crate::mode::MachineMode;
@@ -822,12 +822,12 @@ impl<'a> BaselineAssembler<'a> {
match size {
AllocationSize::Dynamic(reg_size) => {
self.masm
- .cmp_reg_imm(MachineMode::Ptr, reg_size, TLAB_OBJECT_SIZE as i32);
+ .cmp_reg_imm(MachineMode::Ptr, reg_size, MAX_TLAB_OBJECT_SIZE as i32);
self.masm.jump_if(CondCode::GreaterEq, lbl_slow_path);
}
AllocationSize::Fixed(size) => {
- assert!(size < TLAB_OBJECT_SIZE);
+ assert!(size < MAX_TLAB_OBJECT_SIZE);
}
}
@@ -978,7 +978,7 @@ impl<'a> BaselineAssembler<'a> {
match size {
AllocationSize::Fixed(fixed_size) => {
- if fixed_size < TLAB_OBJECT_SIZE {
+ if fixed_size < MAX_TLAB_OBJECT_SIZE {
self.tlab_allocate(dest, size, location, gcpoint);
} else {
self.gc_allocate(dest, size, location, gcpoint);
diff --git a/dora-runtime/src/gc.rs b/dora-runtime/src/gc.rs
index e5d905832..b02c1a442 100644
--- a/dora-runtime/src/gc.rs
+++ b/dora-runtime/src/gc.rs
@@ -12,7 +12,7 @@ use crate::gc::copy::CopyCollector;
use crate::gc::space::{Space, SpaceConfig};
use crate::gc::sweep::SweepCollector;
use crate::gc::swiper::{Swiper, CARD_SIZE};
-use crate::gc::tlab::TLAB_OBJECT_SIZE;
+use crate::gc::tlab::MAX_TLAB_OBJECT_SIZE;
use crate::gc::zero::ZeroCollector;
use crate::mem;
use crate::object::{Header, Obj};
@@ -127,7 +127,7 @@ impl Gc {
self.collect(vm, GcReason::Stress);
}
- if size < TLAB_OBJECT_SIZE && self.supports_tlab {
+ if size < MAX_TLAB_OBJECT_SIZE && self.supports_tlab {
self.alloc_tlab(vm, size)
} else {
self.collector.alloc(vm, size)
diff --git a/dora-runtime/src/gc/allocator.rs b/dora-runtime/src/gc/allocator.rs
index 3244879e6..e0d3f7426 100644
--- a/dora-runtime/src/gc/allocator.rs
+++ b/dora-runtime/src/gc/allocator.rs
@@ -2,6 +2,6 @@ use crate::gc::{Address, Region};
use crate::vm::VM;
pub trait GenerationAllocator {
- fn allocate(&self, vm: &VM, size: usize) -> Option<Address>;
+ fn allocate(&self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address>;
fn free(&self, region: Region);
}
diff --git a/dora-runtime/src/gc/swiper.rs b/dora-runtime/src/gc/swiper.rs
index ab96c09e0..198953ba0 100644
--- a/dora-runtime/src/gc/swiper.rs
+++ b/dora-runtime/src/gc/swiper.rs
@@ -4,9 +4,9 @@ use std::fmt;
use std::mem::size_of;
use std::sync::Arc;
-pub use crate::gc::swiper::old::Page;
pub use crate::gc::swiper::young::YoungAlloc;
+use crate::gc::allocator::GenerationAllocator;
use crate::gc::root::{determine_strong_roots, Slot};
use crate::gc::swiper::card::CardTable;
use crate::gc::swiper::controller::{HeapController, SharedHeapConfig};
@@ -45,11 +45,11 @@ const YOUNG_RATIO: usize = 2;
// heap is divided into cards of size CARD_SIZE.
// card entry determines whether this part of the heap was modified
// in minor collections those parts of the heap need to be analyzed
-pub const CARD_SIZE: usize = 512;
pub const CARD_SIZE_BITS: usize = 9;
+pub const CARD_SIZE: usize = 1 << CARD_SIZE_BITS;
pub const CARD_REFS: usize = CARD_SIZE / size_of::<usize>();
-pub const LARGE_OBJECT_SIZE: usize = 64 * K;
+pub const LARGE_OBJECT_SIZE: usize = 32 * K;
pub const PAGE_SIZE: usize = 128 * K;
pub const PAGE_HEADER_SIZE: usize = 64 * K;
@@ -380,13 +380,15 @@ impl Swiper {
}
fn alloc_normal(&self, vm: &VM, size: usize) -> Address {
- if let Some(address) = self.young.bump_alloc(vm, size) {
+ if let Some(address) = self.young.allocate(vm, size, size) {
return address;
}
self.perform_collection_and_choose(vm, GcReason::AllocationFailure);
- self.young.bump_alloc(vm, size).unwrap_or(Address::null())
+ self.young
+ .allocate(vm, size, size)
+ .unwrap_or(Address::null())
}
fn alloc_large(&self, vm: &VM, size: usize) -> Address {
@@ -406,19 +408,19 @@ impl Collector for Swiper {
}
fn alloc_tlab_area(&self, vm: &VM, size: usize) -> Option<Address> {
- if let Some(address) = self.young.bump_alloc(vm, size) {
+ if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}
self.perform_collection_and_choose(vm, GcReason::AllocationFailure);
- if let Some(address) = self.young.bump_alloc(vm, size) {
+ if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}
self.perform_collection(vm, CollectionKind::Full, GcReason::AllocationFailure);
- if let Some(address) = self.young.bump_alloc(vm, size) {
+ if let Some(address) = self.young.allocate(vm, size, size) {
return Some(address.region_start(size));
}
@@ -635,3 +637,56 @@ fn forward_minor(object: Address, young: Region) -> Option<Address> {
Some(object)
}
}
+
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct Page(Address);
+
+impl Page {
+ pub fn new(start: Address) -> Page {
+ Page(start)
+ }
+
+ pub fn from_address(value: Address) -> Page {
+ let page_start = value.to_usize() & !(PAGE_SIZE - 1);
+ Page::new(page_start.into())
+ }
+
+ pub fn initialize_header(&self) {
+ unsafe {
+ let header = std::slice::from_raw_parts_mut(
+ self.start().to_mut_ptr::<usize>(),
+ PAGE_HEADER_SIZE / mem::ptr_width_usize(),
+ );
+
+ header.fill(0xDEAD2BAD);
+ }
+ }
+
+ pub fn area(&self) -> Region {
+ Region::new(self.start(), self.end())
+ }
+
+ pub fn start(&self) -> Address {
+ self.0
+ }
+
+ pub fn end(&self) -> Address {
+ self.start().offset(PAGE_SIZE)
+ }
+
+ pub fn size(&self) -> usize {
+ PAGE_SIZE
+ }
+
+ pub fn object_area(&self) -> Region {
+ Region::new(self.object_area_start(), self.object_area_end())
+ }
+
+ pub fn object_area_start(&self) -> Address {
+ self.start().offset(PAGE_HEADER_SIZE)
+ }
+
+ pub fn object_area_end(&self) -> Address {
+ self.end()
+ }
+}
diff --git a/dora-runtime/src/gc/swiper/full.rs b/dora-runtime/src/gc/swiper/full.rs
index 65385db44..95ad607ae 100644
--- a/dora-runtime/src/gc/swiper/full.rs
+++ b/dora-runtime/src/gc/swiper/full.rs
@@ -8,8 +8,9 @@ use crate::gc::swiper::card::CardTable;
use crate::gc::swiper::controller::FullCollectorPhases;
use crate::gc::swiper::crossing::CrossingMap;
use crate::gc::swiper::large::LargeSpace;
-use crate::gc::swiper::old::{OldGen, OldGenProtected, Page};
+use crate::gc::swiper::old::{OldGen, OldGenProtected};
use crate::gc::swiper::young::YoungGen;
+use crate::gc::swiper::Page;
use crate::gc::swiper::{walk_region, INITIAL_METADATA_OLD};
use crate::gc::{fill_region_with, iterate_strong_roots, iterate_weak_roots, marking, Slot};
use crate::gc::{Address, GcReason, Region};
@@ -360,7 +361,7 @@ impl<'a> FullCollector<'a> {
return;
}
- if let Some(new_address) = self.old_protected.allocate(self.vm, self.old, size) {
+ if let Some(new_address) = self.old_protected.allocate(self.vm, self.old, size, size) {
let object_end = new_address.offset(size);
object.copy_to(new_address, size);
diff --git a/dora-runtime/src/gc/swiper/minor.rs b/dora-runtime/src/gc/swiper/minor.rs
index 949443c0f..0844ad682 100644
--- a/dora-runtime/src/gc/swiper/minor.rs
+++ b/dora-runtime/src/gc/swiper/minor.rs
@@ -9,12 +9,12 @@ use crate::gc::swiper::card::{CardEntry, CardTable};
use crate::gc::swiper::controller::{MinorCollectorPhases, SharedHeapConfig};
use crate::gc::swiper::crossing::{CrossingEntry, CrossingMap};
use crate::gc::swiper::large::{LargeAlloc, LargeSpace};
-use crate::gc::swiper::old::{OldGen, Page};
+use crate::gc::swiper::old::OldGen;
use crate::gc::swiper::young::YoungGen;
use crate::gc::swiper::{
- forward_minor, CardIdx, YoungAlloc, CARD_SIZE, INITIAL_METADATA_OLD, LARGE_OBJECT_SIZE,
+ forward_minor, CardIdx, Page, YoungAlloc, CARD_SIZE, INITIAL_METADATA_OLD, LARGE_OBJECT_SIZE,
};
-use crate::gc::tlab::{TLAB_OBJECT_SIZE, TLAB_SIZE};
+use crate::gc::tlab::{MAX_TLAB_OBJECT_SIZE, MAX_TLAB_SIZE, MIN_TLAB_SIZE};
use crate::gc::{
fill_region, fill_region_with, iterate_weak_roots, Address, GcReason, GenerationAllocator,
Region,
@@ -247,7 +247,6 @@ impl<'a> MinorCollector<'a> {
young_lab: Lab::new(),
young_alloc,
- copy_failed: false,
timer: prot_timer,
};
@@ -332,12 +331,13 @@ impl Lab {
fn undo_alloc(&mut self, size: usize) {
self.top = (self.top.to_usize() - size).into();
- debug_assert!(self.limit.offset_from(self.top) <= CLAB_SIZE);
+ debug_assert!(self.limit.offset_from(self.top) <= MAX_LAB_SIZE);
}
}
-const CLAB_SIZE: usize = TLAB_SIZE;
-const LAB_OBJECT_SIZE: usize = TLAB_OBJECT_SIZE;
+const MIN_LAB_SIZE: usize = MIN_TLAB_SIZE;
+const MAX_LAB_SIZE: usize = MAX_TLAB_SIZE;
+const MAX_LAB_OBJECT_SIZE: usize = MAX_TLAB_OBJECT_SIZE;
const LOCAL_MAXIMUM: usize = 64;
@@ -376,7 +376,6 @@ struct CopyTask<'a> {
young_lab: Lab,
young_alloc: &'a YoungAlloc,
- copy_failed: bool,
timer: &'a Option<MutexGuard<'a, MinorCollectorPhases>>,
}
@@ -691,7 +690,7 @@ impl<'a> CopyTask<'a> {
}
fn alloc_young(&mut self, size: usize) -> Address {
- if size < LAB_OBJECT_SIZE {
+ if size < MAX_LAB_OBJECT_SIZE {
self.alloc_young_small(size)
} else {
self.alloc_young_medium(size)
@@ -699,15 +698,13 @@ impl<'a> CopyTask<'a> {
}
fn alloc_young_small(&mut self, size: usize) -> Address {
- debug_assert!(size < LAB_OBJECT_SIZE);
+ debug_assert!(size < MAX_LAB_OBJECT_SIZE);
if let Some(object_start) = self.young_lab.allocate(size) {
return object_start;
- } else if self.copy_failed {
- return Address::null();
}
- debug_assert!(size <= CLAB_SIZE);
+ debug_assert!(size <= MAX_LAB_SIZE);
self.young_lab.make_iterable_young(self.vm);
if !self.alloc_young_lab() {
return Address::null();
@@ -717,39 +714,28 @@ impl<'a> CopyTask<'a> {
}
fn alloc_young_medium(&mut self, size: usize) -> Address {
- debug_assert!(LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);
-
- if self.copy_failed {
- return Address::null();
- }
+ debug_assert!(MAX_LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);
- if let Some(result) = self.young_alloc.alloc(self.vm, size) {
+ if let Some(result) = self.young_alloc.alloc(self.vm, size, size) {
result
} else {
- self.copy_failed = true;
-
Address::null()
}
}
fn alloc_young_lab(&mut self) -> bool {
- if self.copy_failed {
- return false;
- }
-
- if let Some(lab_start) = self.young_alloc.alloc(self.vm, CLAB_SIZE) {
- let lab_end = lab_start.offset(CLAB_SIZE);
+ if let Some(lab_start) = self.young_alloc.alloc(self.vm, MIN_LAB_SIZE, MAX_LAB_SIZE) {
+ let lab_end = lab_start.offset(MAX_LAB_SIZE);
self.young_lab.reset(lab_start, lab_end);
true
} else {
- self.copy_failed = true;
self.young_lab.reset(Address::null(), Address::null());
false
}
}
fn undo_alloc_young(&mut self, copy_addr: Address, size: usize) {
- if size < LAB_OBJECT_SIZE {
+ if size < MAX_LAB_OBJECT_SIZE {
self.young_lab.undo_alloc(size)
} else {
// Can't undo mid-sized objects. Need to make the heap iterable.
@@ -758,7 +744,7 @@ impl<'a> CopyTask<'a> {
}
fn alloc_old(&mut self, size: usize) -> Address {
- if size < LAB_OBJECT_SIZE {
+ if size < MAX_LAB_OBJECT_SIZE {
self.alloc_old_small(size)
} else {
self.alloc_old_medium(size)
@@ -766,7 +752,7 @@ impl<'a> CopyTask<'a> {
}
fn alloc_old_small(&mut self, size: usize) -> Address {
- debug_assert!(size < LAB_OBJECT_SIZE);
+ debug_assert!(size < MAX_LAB_OBJECT_SIZE);
let object_start = self.alloc_object_in_old_lab(size);
if let Some(object_start) = object_start {
@@ -783,9 +769,9 @@ impl<'a> CopyTask<'a> {
}
fn alloc_old_medium(&mut self, size: usize) -> Address {
- debug_assert!(LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);
+ debug_assert!(MAX_LAB_OBJECT_SIZE <= size && size < LARGE_OBJECT_SIZE);
- if let Some(object_start) = self.old.allocate(self.vm, size) {
+ if let Some(object_start) = self.old.allocate(self.vm, size, size) {
let old = object_start;
let new = old.offset(size);
self.old.update_crossing(old, new);
@@ -796,7 +782,7 @@ impl<'a> CopyTask<'a> {
}
fn undo_alloc_old(&mut self, copy_addr: Address, size: usize) {
- if size < LAB_OBJECT_SIZE {
+ if size < MAX_LAB_OBJECT_SIZE {
self.old_lab.undo_alloc(size);
} else {
// Can't undo mid-sized objects. Need to make the heap iterable.
@@ -805,8 +791,8 @@ impl<'a> CopyTask<'a> {
}
fn alloc_old_lab(&mut self) -> bool {
- if let Some(lab_start) = self.old.allocate(self.vm, CLAB_SIZE) {
- let lab_end = lab_start.offset(CLAB_SIZE);
+ if let Some(lab_start) = self.old.allocate(self.vm, MIN_LAB_SIZE, MAX_LAB_SIZE) {
+ let lab_end = lab_start.offset(MAX_LAB_SIZE);
self.old_lab.reset(lab_start, lab_end);
true
diff --git a/dora-runtime/src/gc/swiper/old.rs b/dora-runtime/src/gc/swiper/old.rs
index 4cb06071e..b47d51589 100644
--- a/dora-runtime/src/gc/swiper/old.rs
+++ b/dora-runtime/src/gc/swiper/old.rs
@@ -6,11 +6,9 @@ use crate::gc::freelist::FreeList;
use crate::gc::swiper::card::CardTable;
use crate::gc::swiper::controller::SharedHeapConfig;
use crate::gc::swiper::crossing::CrossingMap;
-use crate::gc::swiper::CommonOldGen;
-use crate::gc::swiper::{PAGE_HEADER_SIZE, PAGE_SIZE};
+use crate::gc::swiper::{CommonOldGen, Page, PAGE_SIZE};
use crate::gc::{fill_region, fill_region_with, is_page_aligned};
use crate::gc::{Address, GenerationAllocator, Region};
-use crate::mem::ptr_width_usize;
use crate::os::{self, MemoryPermission};
use crate::vm::VM;
@@ -91,9 +89,9 @@ impl CommonOldGen for OldGen {
}
impl GenerationAllocator for OldGen {
- fn allocate(&self, vm: &VM, size: usize) -> Option<Address> {
+ fn allocate(&self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address> {
let mut protected = self.protected.lock();
- protected.allocate(vm, self, size)
+ protected.allocate(vm, self, min_size, max_size)
}
fn free(&self, _region: Region) {
@@ -151,8 +149,14 @@ impl OldGenProtected {
self.freelist.add(vm, start, size);
}
- pub fn allocate(&mut self, vm: &VM, old: &OldGen, size: usize) -> Option<Address> {
- if let Some(address) = self.raw_alloc(size) {
+ pub fn allocate(
+ &mut self,
+ vm: &VM,
+ old: &OldGen,
+ min_size: usize,
+ max_size: usize,
+ ) -> Option<Address> {
+ if let Some(address) = self.raw_alloc(min_size, max_size) {
fill_region_with(vm, self.top, self.current_limit, false);
old.update_crossing(self.top, self.current_limit);
return Some(address);
@@ -161,13 +165,15 @@ impl OldGenProtected {
fill_region_with(vm, self.top, self.current_limit, false);
old.update_crossing(self.top, self.current_limit);
- let free_space = self.freelist.alloc(size);
+ let free_space = self.freelist.alloc(min_size);
if free_space.is_non_null() {
self.top = free_space.addr();
self.current_limit = self.top.offset(free_space.size());
- let address = self.raw_alloc(size).expect("allocation failed");
+ let address = self
+ .raw_alloc(min_size, max_size)
+ .expect("allocation failed");
fill_region_with(vm, self.top, self.current_limit, false);
old.update_crossing(self.top, self.current_limit);
@@ -184,7 +190,7 @@ impl OldGenProtected {
self.top = page.object_area_start();
self.current_limit = page.object_area_end();
- let result = self.raw_alloc(size);
+ let result = self.raw_alloc(min_size, max_size);
assert!(result.is_some());
// Make rest of page iterable.
@@ -245,8 +251,8 @@ impl OldGenProtected {
}
}
- fn raw_alloc(&mut self, size: usize) -> Option<Address> {
- let next = self.top.offset(size);
+ fn raw_alloc(&mut self, _min_size: usize, max_size: usize) -> Option<Address> {
+ let next = self.top.offset(max_size);
if next <= self.current_limit {
let result = self.top;
@@ -257,56 +263,3 @@ impl OldGenProtected {
}
}
}
-
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Page(Address);
-
-impl Page {
- pub fn new(start: Address) -> Page {
- Page(start)
- }
-
- pub fn from_address(value: Address) -> Page {
- let page_start = value.to_usize() & !(PAGE_SIZE - 1);
- Page::new(page_start.into())
- }
-
- pub fn initialize_header(&self) {
- unsafe {
- let header = std::slice::from_raw_parts_mut(
- self.start().to_mut_ptr::<usize>(),
- PAGE_HEADER_SIZE / ptr_width_usize(),
- );
-
- header.fill(0xDEAD2BAD);
- }
- }
-
- pub fn area(&self) -> Region {
- Region::new(self.start(), self.end())
- }
-
- pub fn start(&self) -> Address {
- self.0
- }
-
- pub fn end(&self) -> Address {
- self.start().offset(PAGE_SIZE)
- }
-
- pub fn size(&self) -> usize {
- PAGE_SIZE
- }
-
- pub fn object_area(&self) -> Region {
- Region::new(self.object_area_start(), self.object_area_end())
- }
-
- pub fn object_area_start(&self) -> Address {
- self.start().offset(PAGE_HEADER_SIZE)
- }
-
- pub fn object_area_end(&self) -> Address {
- self.end()
- }
-}
diff --git a/dora-runtime/src/gc/swiper/verify.rs b/dora-runtime/src/gc/swiper/verify.rs
index 94d5f5d7c..c70b42c2c 100644
--- a/dora-runtime/src/gc/swiper/verify.rs
+++ b/dora-runtime/src/gc/swiper/verify.rs
@@ -6,10 +6,10 @@ use crate::gc::space::Space;
use crate::gc::swiper::card::{CardEntry, CardTable};
use crate::gc::swiper::crossing::{CrossingEntry, CrossingMap};
use crate::gc::swiper::large::LargeSpace;
-use crate::gc::swiper::old::{OldGen, OldGenProtected, Page};
+use crate::gc::swiper::old::{OldGen, OldGenProtected};
use crate::gc::swiper::on_different_cards;
use crate::gc::swiper::young::YoungGen;
-use crate::gc::swiper::CARD_SIZE;
+use crate::gc::swiper::{Page, CARD_SIZE};
use crate::gc::{Address, Region};
use crate::mem;
diff --git a/dora-runtime/src/gc/swiper/young.rs b/dora-runtime/src/gc/swiper/young.rs
index 2aea8d014..89e4cbe38 100644
--- a/dora-runtime/src/gc/swiper/young.rs
+++ b/dora-runtime/src/gc/swiper/young.rs
@@ -159,10 +159,6 @@ impl YoungGen {
self.alloc.reuse(from_gc);
}
- pub fn bump_alloc(&self, vm: &VM, size: usize) -> Option {
- self.alloc.alloc(vm, size)
- }
-
pub fn resize_after_gc(&self, vm: &VM, young_size: usize) {
let new_semi_size = young_size / 2;
assert_eq!(new_semi_size % PAGE_SIZE, 0);
@@ -253,8 +249,8 @@ impl YoungGen {
}
impl GenerationAllocator for YoungGen {
- fn allocate(&self, vm: &VM, size: usize) -> Option<Address> {
- self.alloc.alloc(vm, size)
+ fn allocate(&self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address> {
+ self.alloc.alloc(vm, min_size, max_size)
}
fn free(&self, _region: Region) {
@@ -282,9 +278,9 @@ impl YoungAlloc {
}
}
- pub fn alloc(&self, vm: &VM, size: usize) -> Option<Address> {
+ pub fn alloc(&self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address> {
let mut protected = self.protected.lock();
- protected.alloc(vm, size)
+ protected.alloc(vm, min_size, max_size)
}
fn reset(&self, region: Region) {
@@ -315,8 +311,8 @@ struct YoungAllocProtected {
}
impl YoungAllocProtected {
- fn alloc(&mut self, vm: &VM, size: usize) -> Option<Address> {
- if let Some(address) = self.raw_alloc(vm, size) {
+ fn alloc(&mut self, vm: &VM, min_size: usize, max_size: usize) -> Option<Address> {
+ if let Some(address) = self.raw_alloc(vm, min_size, max_size) {
return Some(address);
}
@@ -328,7 +324,7 @@ impl YoungAllocProtected {
self.current_limit = page.object_area_end();
assert!(self.current_limit <= self.limit);
fill_region_with(vm, self.top, self.current_limit, false);
- let result = self.raw_alloc(vm, size);
+ let result = self.raw_alloc(vm, min_size, max_size);
assert!(result.is_some());
result
} else {
@@ -336,8 +332,8 @@ impl YoungAllocProtected {
}
}
- fn raw_alloc(&mut self, vm: &VM, size: usize) -> Option<Address> {
- let next = self.top.offset(size);
+ fn raw_alloc(&mut self, vm: &VM, _min_size: usize, max_size: usize) -> Option<Address> {
+ let next = self.top.offset(max_size);
if next <= self.current_limit {
let result = self.top;
diff --git a/dora-runtime/src/gc/tlab.rs b/dora-runtime/src/gc/tlab.rs
index 4cbd21997..9aa038534 100644
--- a/dora-runtime/src/gc/tlab.rs
+++ b/dora-runtime/src/gc/tlab.rs
@@ -4,19 +4,20 @@ use crate::gc::{fill_region, Address, Region, K};
use crate::threads::{current_thread, DoraThread};
use crate::vm::VM;
-pub const TLAB_SIZE: usize = 32 * K;
-pub const TLAB_OBJECT_SIZE: usize = 8 * K;
+pub const MIN_TLAB_SIZE: usize = 8 * K;
+pub const MAX_TLAB_SIZE: usize = 32 * K;
+pub const MAX_TLAB_OBJECT_SIZE: usize = 8 * K;
pub fn initialize(tlab: Region) {
current_thread().tld.tlab_initialize(tlab.start, tlab.end);
}
pub fn calculate_size() -> usize {
- TLAB_SIZE
+ MAX_TLAB_SIZE
}
pub fn allocate(size: usize) -> Option {
- assert!(size < TLAB_OBJECT_SIZE);
+ assert!(size < MAX_TLAB_OBJECT_SIZE);
let thread = current_thread();
let tlab = thread.tld.tlab_region();