Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Further allocator refactoring #1173

Merged
merged 6 commits into from
Dec 9, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion include/runtime/alloc.h
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ extern "C" {

char youngspace_collection_id(void);
char oldspace_collection_id(void);
size_t youngspace_size(void);
//size_t youngspace_size(void);
stevenmeker marked this conversation as resolved.
Show resolved Hide resolved

// allocates exactly requested bytes into the young generation
void *kore_alloc(size_t requested);
Expand Down
71 changes: 29 additions & 42 deletions include/runtime/arena.h
Original file line number Diff line number Diff line change
Expand Up @@ -27,32 +27,21 @@ class arena {
void *kore_arena_alloc(size_t requested);

// Returns the address of the first byte that belongs in the given arena.
// Returns 0 if nothing has been allocated ever in that arena.
char *arena_start_ptr() const {
return current_addr_ptr ? current_addr_ptr + sizeof(memory_block_header)
: nullptr;
}
// Returns nullptr if nothing has been allocated ever in that arena.
char *arena_start_ptr() const { return current_addr_ptr; }

// Returns a pointer to a location holding the address of last allocated
// byte in the given arena plus 1.
// This address is 0 if nothing has been allocated ever in that arena.
// This address is nullptr if nothing has been allocated ever in that arena.
char **arena_end_ptr() { return &allocation_ptr; }

// return the total number of allocatable bytes currently in the arena in its
// active semispace.
size_t arena_size() const {
update_num_blocks();
return BLOCK_SIZE * std::max(num_blocks, num_collection_blocks);
}

// Clears the current allocation space by setting its start back to its first
// block. It is used during garbage collection to effectively collect all of the
// arena.
// arena. Resets the tripwire.
void arena_clear();

// Resizes the last allocation as long as the resize does not require a new
// block allocation.
// Returns the address of the byte following the last newlly allocated byte.
// Resizes the last allocation.
// Returns the address of the byte following the last newly allocated byte.
void *arena_resize_last_alloc(ssize_t increase) {
return (allocation_ptr += increase);
}
Expand All @@ -71,10 +60,8 @@ class arena {
void arena_swap_and_clear();

// Given two pointers to objects allocated in the same arena, return the number
// of bytes they are separated by within the virtual block of memory represented
// by the blocks of that arena. This difference will include blocks containing
// sentinel bytes. Undefined behavior will result if the pointers belong to
// different arenas.
// of bytes they are apart. Undefined behavior will result if the pointers
// don't belong to the same arena.
static ssize_t ptr_diff(char *ptr1, char *ptr2) { return ptr1 - ptr2; }

// Given a starting pointer to an address allocated in an arena and a size in
Expand All @@ -84,11 +71,11 @@ class arena {
// 1st argument: the starting pointer
// 2nd argument: the size in bytes to add to the starting pointer
// 3rd argument: the address of last allocated byte in the arena plus 1
// Return value: the address allocated in the arena after size bytes from the
// starting pointer, or 0 if this is equal to the 3rd argument.
// Return value: starting pointer + size unless this points to unallocated space
// in which case nullptr is returned
static char *move_ptr(char *ptr, size_t size, char const *arena_end_ptr) {
char *next_ptr = ptr + size;
return (next_ptr == arena_end_ptr) ? 0 : next_ptr;
return (next_ptr == arena_end_ptr) ? nullptr : next_ptr;
}

// Returns the ID of the semispace where the given address was allocated.
Expand All @@ -97,15 +84,6 @@ class arena {
static char get_arena_semispace_id_of_object(void *ptr);

private:
union memory_block_header {
//
// Currently the header just holds the semispace id. But we need it to be a
// multiple of sizeof(char*) for alignment purposes so we add a dummy char*.
//
char semispace;
char *alignment_dummy;
};

//
// We update the number of 1MB blocks actually written to, only when we need this value,
// or before a garbage collection rather than trying to determine when we write to a fresh block.
Expand All @@ -121,13 +99,6 @@ class arena {
}

void initialize_semispace();

static memory_block_header *mem_block_header(void *ptr) {
uintptr_t address = reinterpret_cast<uintptr_t>(ptr);
return reinterpret_cast<arena::memory_block_header *>(
(address - 1) & ~(HYPERBLOCK_SIZE - 1));
}

//
// Current semispace where allocations are being made.
//
Expand All @@ -146,6 +117,19 @@ class arena {
= 0; // notional number of BLOCK_SIZE blocks in collection semispace
};

// Returns the semispace id of the hyperblock that `ptr` points into.
// The id is stored in the hyperblock's last byte (written by
// initialize_semispace), and the hyperblock is aligned on a HYPERBLOCK_SIZE
// boundary, so OR-ing the low bits of any interior address locates that byte.
// NOTE(review): this relies on HYPERBLOCK_SIZE being a power of two — confirm.
inline char arena::get_arena_semispace_id_of_object(void *ptr) {
  //
  // We don't have to deal with the "1 past the end of block" case because
  // a valid pointer will always point into our hyperblock - we will never return
  // an allocation anywhere near the end of our hyperblock.
  //
  // Set the low bits to 1 to get the address of the last byte in the hyperblock.
  //
  uintptr_t end_address
      = reinterpret_cast<uintptr_t>(ptr) | (HYPERBLOCK_SIZE - 1);
  return *reinterpret_cast<char *>(end_address);
}

// Macro to define a new arena with the given ID. Supports IDs ranging from 0 to
// 127.
#define REGISTER_ARENA(name, id) static thread_local arena name(id)
Expand All @@ -169,8 +153,11 @@ inline void *arena::kore_arena_alloc(size_t requested) {
// collect when allowed.
//
time_for_collection = true;
tripwire = current_addr_ptr
+ HYPERBLOCK_SIZE; // won't trigger again until arena swap
//
// We move the tripwire to 1 past the end of our hyperblock so that we have
// a well defined comparison that will always be false until the next arena swap.
//
tripwire = current_addr_ptr + HYPERBLOCK_SIZE;
}
void *result = allocation_ptr;
allocation_ptr += requested;
Expand Down
17 changes: 6 additions & 11 deletions runtime/alloc/arena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -11,11 +11,6 @@

extern size_t const VAR_BLOCK_SIZE = BLOCK_SIZE;

__attribute__((always_inline)) char
arena::get_arena_semispace_id_of_object(void *ptr) {
return mem_block_header(ptr)->semispace;
}

#ifdef __MACH__
//
// thread_local disabled for Apple
Expand Down Expand Up @@ -46,19 +41,19 @@ void arena::initialize_semispace() {
}
//
// We allocated 2 * HYPERBLOCK_SIZE worth of address space but we're only going to use 1, aligned on a
// HYPERBLOCK_SIZE boundry. This is so we can get the start of the hyperblock by masking any address within it.
// HYPERBLOCK_SIZE boundary. This is so we can get the end of the hyperblock by setting the low bits of any
// address within the space to 1.
// We don't worry about unused address space either side of our aligned address space because there will be no
// memory mapped to it.
//
current_addr_ptr = reinterpret_cast<char *>(
std::align(HYPERBLOCK_SIZE, HYPERBLOCK_SIZE, addr, request));
//
// We put a memory_block_header at the beginning so we can identify the semispace a pointer belongs to
// id by masking off the low bits to access this memory_block_header.
// We put a semispace id in the last byte of the hyperblock so we can identify which semispace an address
// belongs to by setting the low bits to 1 to access this id.
//
auto *header = reinterpret_cast<memory_block_header *>(current_addr_ptr);
header->semispace = allocation_semispace_id;
allocation_ptr = current_addr_ptr + sizeof(arena::memory_block_header);
current_addr_ptr[HYPERBLOCK_SIZE - 1] = allocation_semispace_id;
allocation_ptr = current_addr_ptr;
//
// We set the tripwire for this space so that we trigger a garbage collection when we pass BLOCK_SIZE of memory
// allocated from this space.
Expand Down
4 changes: 0 additions & 4 deletions runtime/lto/alloc.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,6 @@ char oldspace_collection_id() {
return oldspace.get_arena_collection_semispace_id();
}

size_t youngspace_size(void) {
return youngspace.arena_size();
}

void kore_alloc_swap(bool swap_old) {
youngspace.arena_swap_and_clear();
if (swap_old) {
Expand Down
Loading