Skip to content

Commit

Permalink
Store CharAlloc in SwissTable in order to simplify type erasure of functions accepting allocator as `void*`.

Browse files Browse the repository at this point in the history

This way we can more naturally use CharAlloc as a template parameter for type-erased functions.

PiperOrigin-RevId: 729010441
Change-Id: Ib733690335bbd349add0b290dc272ee9d02f3c7e
  • Loading branch information
goldvitaly authored and copybara-github committed Feb 20, 2025
1 parent 2844cda commit 7971b4a
Show file tree
Hide file tree
Showing 3 changed files with 109 additions and 71 deletions.
16 changes: 8 additions & 8 deletions absl/container/internal/raw_hash_set.cc
Original file line number Diff line number Diff line change
Expand Up @@ -477,7 +477,7 @@ void ResizeNonSooImpl(CommonFields& common, size_t new_capacity,

common.set_capacity(new_capacity);
RawHashSetLayout layout(new_capacity, slot_size, slot_align, has_infoz);
void* alloc = policy.alloc_fn(common);
void* alloc = policy.get_char_alloc(common);
char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
const GenerationType old_generation = common.generation();
common.set_generation_ptr(
Expand Down Expand Up @@ -589,7 +589,7 @@ void ResizeFullSooTable(CommonFields& common, size_t new_capacity,
common.set_capacity(new_capacity);

RawHashSetLayout layout(new_capacity, slot_size, slot_align, has_infoz);
void* alloc = policy.alloc_fn(common);
void* alloc = policy.get_char_alloc(common);
char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
const GenerationType old_generation = common.generation();
common.set_generation_ptr(
Expand Down Expand Up @@ -767,7 +767,7 @@ size_t GrowToNextCapacityAndPrepareInsert(CommonFields& common, size_t new_hash,
const bool has_infoz = infoz.IsSampled();

RawHashSetLayout layout(new_capacity, slot_size, slot_align, has_infoz);
void* alloc = policy.alloc_fn(common);
void* alloc = policy.get_char_alloc(common);
char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
const GenerationType old_generation = common.generation();
common.set_generation_ptr(
Expand Down Expand Up @@ -928,8 +928,8 @@ void Rehash(CommonFields& common, size_t n, const PolicyFunctions& policy) {
const size_t cap = common.capacity();

auto clear_backing_array = [&]() {
ClearBackingArray(common, policy, policy.alloc_fn(common), /*reuse=*/false,
policy.soo_capacity > 0);
ClearBackingArray(common, policy, policy.get_char_alloc(common),
/*reuse=*/false, policy.soo_capacity > 0);
};

const size_t slot_size = policy.slot_size;
Expand All @@ -956,9 +956,9 @@ void Rehash(CommonFields& common, size_t n, const PolicyFunctions& policy) {
assert(policy.slot_align <= alignof(HeapOrSoo));
HeapOrSoo tmp_slot;
size_t begin_offset = FindFirstFullSlot(0, cap, common.control());
policy.transfer(
&common, &tmp_slot,
SlotAddress(common.slot_array(), begin_offset, slot_size), 1);
policy.transfer(&common, &tmp_slot,
SlotAddress(common.slot_array(), begin_offset, slot_size),
1);
clear_backing_array();
policy.transfer(&common, common.soo_data(), &tmp_slot, 1);
common.set_full_soo();
Expand Down
100 changes: 52 additions & 48 deletions absl/container/internal/raw_hash_set.h
Original file line number Diff line number Diff line change
Expand Up @@ -2012,8 +2012,8 @@ struct PolicyFunctions {
// TODO(b/382423690): consider having separate `transfer` and `transfer_n`.
void (*transfer)(void* set, void* dst_slot, void* src_slot, size_t count);

// Returns the pointer to the allocator stored in the set.
void* (*alloc_fn)(CommonFields& common);
// Returns the pointer to the CharAlloc stored in the set.
void* (*get_char_alloc)(CommonFields& common);

// Allocates n bytes for the backing store for common.
void* (*alloc)(void* alloc, size_t n);
Expand Down Expand Up @@ -2159,7 +2159,7 @@ void GrowFullSooTableToNextCapacity(CommonFields& common, size_t soo_slot_hash,
// The decision to sample was already made during the first insertion.
RawHashSetLayout layout(kNewCapacity, slot_size, slot_align,
/*has_infoz=*/false);
void* alloc = policy.alloc_fn(common);
void* alloc = policy.get_char_alloc(common);
char* mem = static_cast<char*>(policy.alloc(alloc, layout.alloc_size()));
const GenerationType old_generation = common.generation();
common.set_generation_ptr(
Expand Down Expand Up @@ -2675,7 +2675,7 @@ class raw_hash_set {

raw_hash_set(const raw_hash_set& that)
: raw_hash_set(that, AllocTraits::select_on_container_copy_construction(
that.alloc_ref())) {}
allocator_type(that.char_alloc_ref()))) {}

raw_hash_set(const raw_hash_set& that, const allocator_type& a)
: raw_hash_set(GrowthToLowerboundCapacity(that.size()), that.hash_ref(),
Expand Down Expand Up @@ -2758,7 +2758,7 @@ class raw_hash_set {
settings_(PolicyTraits::transfer_uses_memcpy() || !that.is_full_soo()
? std::move(that.common())
: CommonFields{full_soo_tag_t{}},
that.hash_ref(), that.eq_ref(), that.alloc_ref()) {
that.hash_ref(), that.eq_ref(), that.char_alloc_ref()) {
if (!PolicyTraits::transfer_uses_memcpy() && that.is_full_soo()) {
transfer(soo_slot(), that.soo_slot());
}
Expand All @@ -2769,7 +2769,7 @@ class raw_hash_set {
raw_hash_set(raw_hash_set&& that, const allocator_type& a)
: settings_(CommonFields::CreateDefault<SooEnabled()>(), that.hash_ref(),
that.eq_ref(), a) {
if (a == that.alloc_ref()) {
if (CharAlloc(a) == that.char_alloc_ref()) {
swap_common(that);
annotate_for_bug_detection_on_move(that);
} else {
Expand All @@ -2786,7 +2786,9 @@ class raw_hash_set {
// is an exact match for that.size(). If this->capacity() is too big, then
// it would make iteration very slow to reuse the allocation. Maybe we can
// do the same heuristic as clear() and reuse if it's small enough.
raw_hash_set tmp(that, propagate_alloc ? that.alloc_ref() : alloc_ref());
allocator_type alloc(propagate_alloc ? that.char_alloc_ref()
: char_alloc_ref());
raw_hash_set tmp(that, alloc);
// NOLINTNEXTLINE: not returning *this for performance.
return assign_impl<propagate_alloc>(std::move(tmp));
}
Expand Down Expand Up @@ -3112,7 +3114,8 @@ class raw_hash_set {
auto res = find_or_prepare_insert(key);
if (res.second) {
slot_type* slot = res.first.slot();
std::forward<F>(f)(constructor(&alloc_ref(), &slot));
allocator_type alloc(char_alloc_ref());
std::forward<F>(f)(constructor(&alloc, &slot));
ABSL_SWISSTABLE_ASSERT(!slot);
}
return res.first;
Expand Down Expand Up @@ -3216,7 +3219,8 @@ class raw_hash_set {
AssertNotDebugCapacity();
AssertIsFull(position.control(), position.inner_.generation(),
position.inner_.generation_ptr(), "extract()");
auto node = CommonAccess::Transfer<node_type>(alloc_ref(), position.slot());
allocator_type alloc(char_alloc_ref());
auto node = CommonAccess::Transfer<node_type>(alloc, position.slot());
if (is_soo()) {
common().set_empty_soo();
} else {
Expand All @@ -3242,7 +3246,7 @@ class raw_hash_set {
swap_common(that);
swap(hash_ref(), that.hash_ref());
swap(eq_ref(), that.eq_ref());
SwapAlloc(alloc_ref(), that.alloc_ref(),
SwapAlloc(char_alloc_ref(), that.char_alloc_ref(),
typename AllocTraits::propagate_on_container_swap{});
}

Expand Down Expand Up @@ -3364,7 +3368,9 @@ class raw_hash_set {

hasher hash_function() const { return hash_ref(); }
key_equal key_eq() const { return eq_ref(); }
allocator_type get_allocator() const { return alloc_ref(); }
allocator_type get_allocator() const {
return allocator_type(char_alloc_ref());
}

friend bool operator==(const raw_hash_set& a, const raw_hash_set& b) {
if (a.size() != b.size()) return false;
Expand Down Expand Up @@ -3431,7 +3437,7 @@ class raw_hash_set {
struct EqualElement {
template <class K2, class... Args>
bool operator()(const K2& lhs, Args&&...) const {
return eq(lhs, rhs);
ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(eq(lhs, rhs));
}
const K1& rhs;
const key_equal& eq;
Expand Down Expand Up @@ -3469,16 +3475,21 @@ class raw_hash_set {
template <typename... Args>
inline void construct(slot_type* slot, Args&&... args) {
common().RunWithReentrancyGuard([&] {
PolicyTraits::construct(&alloc_ref(), slot, std::forward<Args>(args)...);
allocator_type alloc(char_alloc_ref());
PolicyTraits::construct(&alloc, slot, std::forward<Args>(args)...);
});
}
inline void destroy(slot_type* slot) {
common().RunWithReentrancyGuard(
[&] { PolicyTraits::destroy(&alloc_ref(), slot); });
common().RunWithReentrancyGuard([&] {
allocator_type alloc(char_alloc_ref());
PolicyTraits::destroy(&alloc, slot);
});
}
inline void transfer(slot_type* to, slot_type* from) {
common().RunWithReentrancyGuard(
[&] { PolicyTraits::transfer(&alloc_ref(), to, from); });
common().RunWithReentrancyGuard([&] {
allocator_type alloc(char_alloc_ref());
PolicyTraits::transfer(&alloc, to, from);
});
}

// TODO(b/289225379): consider having a helper class that has the impls for
Expand Down Expand Up @@ -3522,8 +3533,7 @@ class raw_hash_set {

void clear_backing_array(bool reuse) {
ABSL_SWISSTABLE_ASSERT(capacity() > DefaultCapacity());
CharAlloc alloc(alloc_ref());
ClearBackingArray(common(), GetPolicyFunctions(), &alloc, reuse,
ClearBackingArray(common(), GetPolicyFunctions(), &char_alloc_ref(), reuse,
SooEnabled());
}

Expand All @@ -3541,9 +3551,8 @@ class raw_hash_set {
// Unpoison before returning the memory to the allocator.
SanitizerUnpoisonMemoryRegion(slot_array(), sizeof(slot_type) * capacity());
infoz().Unregister();
CharAlloc alloc(alloc_ref());
DeallocateBackingArray<BackingArrayAlignment(alignof(slot_type)),
CharAlloc>(&alloc, capacity(), control(),
CharAlloc>(&char_alloc_ref(), capacity(), control(),
sizeof(slot_type), alignof(slot_type),
common().has_infoz());
}
Expand Down Expand Up @@ -3598,7 +3607,7 @@ class raw_hash_set {
static slot_type* to_slot(void* buf) { return static_cast<slot_type*>(buf); }

// Requires that lhs does not have a full SOO slot.
static void move_common(bool rhs_is_full_soo, allocator_type& rhs_alloc,
static void move_common(bool rhs_is_full_soo, CharAlloc& rhs_alloc,
CommonFields& lhs, CommonFields&& rhs) {
if (PolicyTraits::transfer_uses_memcpy() || !rhs_is_full_soo) {
lhs = std::move(rhs);
Expand All @@ -3623,10 +3632,12 @@ class raw_hash_set {
}
CommonFields tmp = CommonFields(uninitialized_tag_t{});
const bool that_is_full_soo = that.is_full_soo();
move_common(that_is_full_soo, that.alloc_ref(), tmp,
move_common(that_is_full_soo, that.char_alloc_ref(), tmp,
std::move(that.common()));
move_common(is_full_soo(), alloc_ref(), that.common(), std::move(common()));
move_common(that_is_full_soo, that.alloc_ref(), common(), std::move(tmp));
move_common(is_full_soo(), char_alloc_ref(), that.common(),
std::move(common()));
move_common(that_is_full_soo, that.char_alloc_ref(), common(),
std::move(tmp));
}

void annotate_for_bug_detection_on_move(
Expand All @@ -3653,11 +3664,11 @@ class raw_hash_set {
// We don't bother checking for this/that aliasing. We just need to avoid
// breaking the invariants in that case.
destructor_impl();
move_common(that.is_full_soo(), that.alloc_ref(), common(),
move_common(that.is_full_soo(), that.char_alloc_ref(), common(),
std::move(that.common()));
hash_ref() = that.hash_ref();
eq_ref() = that.eq_ref();
CopyAlloc(alloc_ref(), that.alloc_ref(),
CopyAlloc(char_alloc_ref(), that.char_alloc_ref(),
std::integral_constant<bool, propagate_alloc>());
that.common() = CommonFields::CreateDefault<SooEnabled()>();
annotate_for_bug_detection_on_move(that);
Expand All @@ -3684,7 +3695,7 @@ class raw_hash_set {
}
raw_hash_set& move_assign(raw_hash_set&& that,
std::false_type /*propagate_alloc*/) {
if (alloc_ref() == that.alloc_ref()) {
if (char_alloc_ref() == that.char_alloc_ref()) {
return assign_impl<false>(std::move(that));
}
// Aliasing can't happen here because allocs would compare equal above.
Expand Down Expand Up @@ -3913,10 +3924,12 @@ class raw_hash_set {
}
slot_type* soo_slot() {
ABSL_SWISSTABLE_ASSERT(is_soo());
return static_cast<slot_type*>(common().soo_data());
ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(
static_cast<slot_type*>(common().soo_data()));
}
const slot_type* soo_slot() const {
return const_cast<raw_hash_set*>(this)->soo_slot();
ABSL_SWISSTABLE_IGNORE_UNINITIALIZED_RETURN(
const_cast<raw_hash_set*>(this)->soo_slot());
}
iterator soo_iterator() {
return {SooControl(), soo_slot(), common().generation_ptr()};
Expand All @@ -3933,14 +3946,14 @@ class raw_hash_set {
const hasher& hash_ref() const { return settings_.template get<1>(); }
key_equal& eq_ref() { return settings_.template get<2>(); }
const key_equal& eq_ref() const { return settings_.template get<2>(); }
allocator_type& alloc_ref() { return settings_.template get<3>(); }
const allocator_type& alloc_ref() const {
CharAlloc& char_alloc_ref() { return settings_.template get<3>(); }
const CharAlloc& char_alloc_ref() const {
return settings_.template get<3>();
}

static void* get_alloc_ref_fn(CommonFields& common) {
static void* get_char_alloc_ref_fn(CommonFields& common) {
auto* h = reinterpret_cast<raw_hash_set*>(&common);
return &h->alloc_ref();
return &h->char_alloc_ref();
}
static void* get_hash_ref_fn(CommonFields& common) {
auto* h = reinterpret_cast<raw_hash_set*>(&common);
Expand Down Expand Up @@ -3989,11 +4002,6 @@ class raw_hash_set {
static_assert(sizeof(value_type) <= (std::numeric_limits<uint32_t>::max)());
static constexpr size_t kBackingArrayAlignment =
BackingArrayAlignment(alignof(slot_type));
// TODO(b/397461659): store CharAlloc in the table instead of Alloc.
// If both allocators are empty, we can use the same pointer for both
// allocators.
static constexpr bool kAllocAndCharAllocPointersCompatible =
std::is_empty_v<CharAlloc> && std::is_empty_v<Alloc>;
static constexpr PolicyFunctions value = {
sizeof(key_type), sizeof(value_type), sizeof(slot_type),
alignof(slot_type), SooEnabled() ? SooCapacity() : 0,
Expand All @@ -4007,13 +4015,9 @@ class raw_hash_set {
? TransferRelocatable<sizeof(slot_type)>
: &raw_hash_set::transfer_slots_fn,
std::is_empty_v<Alloc> ? &GetRefForEmptyClass
: &raw_hash_set::get_alloc_ref_fn,
kAllocAndCharAllocPointersCompatible
? &AllocateBackingArray<kBackingArrayAlignment, CharAlloc>
: &AllocateBackingArray<kBackingArrayAlignment, Alloc>,
kAllocAndCharAllocPointersCompatible
? &DeallocateBackingArray<kBackingArrayAlignment, CharAlloc>
: &DeallocateBackingArray<kBackingArrayAlignment, Alloc>,
: &raw_hash_set::get_char_alloc_ref_fn,
&AllocateBackingArray<kBackingArrayAlignment, CharAlloc>,
&DeallocateBackingArray<kBackingArrayAlignment, CharAlloc>,
&raw_hash_set::find_new_positions_and_transfer_slots_fn};
return value;
}
Expand All @@ -4022,9 +4026,9 @@ class raw_hash_set {
// CompressedTuple will ensure that sizeof is not affected by any of the empty
// fields that occur after CommonFields.
absl::container_internal::CompressedTuple<CommonFields, hasher, key_equal,
allocator_type>
CharAlloc>
settings_{CommonFields::CreateDefault<SooEnabled()>(), hasher{},
key_equal{}, allocator_type{}};
key_equal{}, CharAlloc{}};
};

// Friend access for free functions in raw_hash_set.h.
Expand Down
Loading

0 comments on commit 7971b4a

Please sign in to comment.