Skip to content

Commit

Permalink
fixed formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
stevenmeker committed Dec 2, 2024
1 parent 4318df9 commit 7a2df99
Show file tree
Hide file tree
Showing 2 changed files with 60 additions and 40 deletions.
80 changes: 49 additions & 31 deletions include/runtime/arena.h
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
#ifndef ARENA_H
#define ARENA_H

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <utility>
#include <algorithm>
#include <sys/types.h>
#include <utility>

#include "runtime/alloc.h"

Expand All @@ -17,25 +17,33 @@ size_t const HYPERBLOCK_SIZE = (size_t)BLOCK_SIZE * 1024 * 1024;
// once.
class arena {
public:
arena(char id) : allocation_semispace_id(id) { initialize_semispace(); }
arena(char id)
: allocation_semispace_id(id) {
initialize_semispace();
}

// Allocates the requested number of bytes as a contiguous region and returns a
// pointer to the first allocated byte.
void *kore_arena_alloc(size_t requested);

// Returns the address of the first byte that belongs in the given arena.
// Returns 0 if nothing has been allocated ever in that arena.
char *arena_start_ptr() const { return current_addr_ptr ? current_addr_ptr + sizeof(memory_block_header) : nullptr; }
char *arena_start_ptr() const {
return current_addr_ptr ? current_addr_ptr + sizeof(memory_block_header)
: nullptr;
}

// Returns a pointer to a location holding the address of last allocated
// byte in the given arena plus 1.
// This address is 0 if nothing has been allocated ever in that arena.
char **arena_end_ptr() { return &allocation_ptr; }


// return the total number of allocatable bytes currently in the arena in its
// active semispace.
size_t arena_size() const { update_num_blocks(); return BLOCK_SIZE * std::max(num_blocks, num_collection_blocks); }
size_t arena_size() const {
update_num_blocks();
return BLOCK_SIZE * std::max(num_blocks, num_collection_blocks);
}

// Clears the current allocation space by setting its start back to its first
// block. It is used during garbage collection to effectively collect all of the
Expand All @@ -45,13 +53,17 @@ class arena {
// Resizes the last allocation as long as the resize does not require a new
// block allocation.
// Returns the address of the byte following the last newly allocated byte.
void *arena_resize_last_alloc(ssize_t increase) { return (allocation_ptr += increase); }
void *arena_resize_last_alloc(ssize_t increase) {
return (allocation_ptr += increase);
}

// Returns the given arena's current collection semispace ID.
// Each arena has 2 semispace IDs: one equal to the arena ID and the other equal
// to the 1's complement of the arena ID. At any time one of these semispaces
// is used for allocation and the other is used for collection.
char get_arena_collection_semispace_id() const { return ~allocation_semispace_id; }
char get_arena_collection_semispace_id() const {
return ~allocation_semispace_id;
}

// Exchanges the current allocation and collection semispaces and clears the new
// current allocation semispace by setting its start back to its first block.
Expand Down Expand Up @@ -102,31 +114,36 @@ class arena {
//
// Calculate how many 1M blocks of the current arena we used.
//
size_t num_used_blocks = (allocation_ptr - current_addr_ptr - 1) / BLOCK_SIZE + 1;
size_t num_used_blocks
= (allocation_ptr - current_addr_ptr - 1) / BLOCK_SIZE + 1;
if (num_used_blocks > num_blocks)
num_blocks = num_used_blocks;
}

void initialize_semispace();

static memory_block_header *mem_block_header(void *ptr) {
uintptr_t address = reinterpret_cast<uintptr_t>(ptr);
return reinterpret_cast<arena::memory_block_header *>((address - 1) & ~(HYPERBLOCK_SIZE - 1));
return reinterpret_cast<arena::memory_block_header *>(
(address - 1) & ~(HYPERBLOCK_SIZE - 1));
}

//
// Current semispace where allocations are being made.
//
char *current_addr_ptr; // pointer to start of current address space
char *allocation_ptr; // next available location in current semispace
char *tripwire; // allocating past this triggers slow allocation
mutable size_t num_blocks; // notional number of BLOCK_SIZE blocks in current semispace
char allocation_semispace_id; // id of current semispace
char *current_addr_ptr; // pointer to start of current address space
char *allocation_ptr; // next available location in current semispace
char *tripwire; // allocating past this triggers slow allocation
mutable size_t
num_blocks; // notional number of BLOCK_SIZE blocks in current semispace
char allocation_semispace_id; // id of current semispace
//
// Semispace where allocations will be made during and after garbage collect.
//
char *collection_addr_ptr = nullptr; // pointer to start of collection address space
size_t num_collection_blocks = 0; // notional number of BLOCK_SIZE blocks in collection semispace
char *collection_addr_ptr
= nullptr; // pointer to start of collection address space
size_t num_collection_blocks
= 0; // notional number of BLOCK_SIZE blocks in collection semispace
};

// Macro to define a new arena with the given ID. Supports IDs ranging from 0 to
Expand Down Expand Up @@ -155,11 +172,14 @@ inline void *arena::kore_arena_alloc(size_t requested) {
// collect when allowed.
//
time_for_collection = true;
tripwire = current_addr_ptr + HYPERBLOCK_SIZE; // won't trigger again until arena swap
tripwire = current_addr_ptr
+ HYPERBLOCK_SIZE; // won't trigger again until arena swap
}
void *result = allocation_ptr;
allocation_ptr += requested;
MEM_LOG("Allocation at %p (size %zd), next alloc at %p\n", result, requested, block);
MEM_LOG(
"Allocation at %p (size %zd), next alloc at %p\n", result, requested,
block);
return result;
}

Expand All @@ -174,24 +194,22 @@ inline void arena::arena_clear() {
// Otherwise we only want to generate a garbage collect if we allocate off the
// end of this area.
//
tripwire = current_addr_ptr + (num_blocks - (num_blocks >= get_gc_threshold())) * BLOCK_SIZE;
tripwire = current_addr_ptr
+ (num_blocks - (num_blocks >= get_gc_threshold())) * BLOCK_SIZE;
}

inline void arena::arena_swap_and_clear() {
update_num_blocks(); // so we save the correct number of touched blocks
update_num_blocks(); // so we save the correct number of touched blocks
std::swap(current_addr_ptr, collection_addr_ptr);
std::swap(num_blocks, num_collection_blocks);
allocation_semispace_id = ~allocation_semispace_id;
if (current_addr_ptr == nullptr)
{
//
// The other semispace hasn't been initialized yet.
//
void initialize_semispace();
}
else
if (current_addr_ptr == nullptr) {
//
// The other semispace hasn't been initialized yet.
//
void initialize_semispace();
} else
arena_clear();
}

}
#endif // ARENA_H
20 changes: 11 additions & 9 deletions runtime/alloc/arena.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -33,13 +33,13 @@ void arena::initialize_semispace() {
//
size_t request = 2 * HYPERBLOCK_SIZE;
void *addr = mmap(
nullptr, // let OS choose the address
request, // Linux and MacOS both allow up to 64TB
PROT_READ | PROT_WRITE, // read, write but not execute
MAP_ANONYMOUS | MAP_PRIVATE
| MAP_NORESERVE, // allocate address space only
-1, // no file backing
0); // no offset
nullptr, // let OS choose the address
request, // Linux and MacOS both allow up to 64TB
PROT_READ | PROT_WRITE, // read, write but not execute
MAP_ANONYMOUS | MAP_PRIVATE
| MAP_NORESERVE, // allocate address space only
-1, // no file backing
0); // no offset
if (addr == MAP_FAILED) {
perror("mmap()");
abort();
Expand All @@ -50,12 +50,14 @@ void arena::initialize_semispace() {
// We don't worry about unused address space on either side of our aligned address space because there will be no
// memory mapped to it.
//
current_addr_ptr = reinterpret_cast<char *>(std::align(HYPERBLOCK_SIZE, HYPERBLOCK_SIZE, addr, request));
current_addr_ptr = reinterpret_cast<char *>(
std::align(HYPERBLOCK_SIZE, HYPERBLOCK_SIZE, addr, request));
//
// We put a memory_block_header at the beginning so we can identify the semispace a pointer belongs to
// by masking off the low bits to access this memory_block_header.
//
memory_block_header *header = reinterpret_cast<memory_block_header *>(current_addr_ptr);
memory_block_header *header
= reinterpret_cast<memory_block_header *>(current_addr_ptr);
header->semispace = allocation_semispace_id;
allocation_ptr = current_addr_ptr + sizeof(arena::memory_block_header);
//
Expand Down

0 comments on commit 7a2df99

Please sign in to comment.