#include <type_traits>
// ...

#ifndef ABSL_LOW_LEVEL_ALLOC_MISSING
// ...

#if defined(__APPLE__)
#if !defined MAP_ANONYMOUS
#define MAP_ANONYMOUS MAP_ANON
#endif  // !MAP_ANONYMOUS
#endif  // __APPLE__
// ...

namespace base_internal {
// Returns the number of doublings needed to get from base up to size.
static int IntLog2(size_t size, size_t base) {
  int result = 0;
  for (size_t i = size; i > base; i >>= 1) {  // i == floor(size / 2**result)
    result++;
  }
  return result;
}
// Return a random integer n:  p(n)=1/(2**n) if 1 <= n; p(n)=0 if n < 1.
static int Random(uint32_t *state) {
  uint32_t r = *state;
  int result = 1;
  while ((((r = r * 1103515245 + 12345) >> 30) & 1) == 0) {
    result++;
  }
  *state = r;
  return result;
}
// Returns the number of skiplist levels for a block of the given size,
// clamped so the level pointers still fit inside the block.
static int LLA_SkiplistLevels(size_t size, size_t base, uint32_t *random) {
  // max_fit is the maximum number of levels that fit in a node of this size:
  // one AllocList pointer per level, after the header.
  size_t max_fit = (size - offsetof(AllocList, next)) / sizeof(AllocList *);
  int level = IntLog2(size, base) + (random != nullptr ? Random(random) : 1);
  if (static_cast<size_t>(level) > max_fit) level = static_cast<int>(max_fit);
  if (level > kMaxLevel - 1) level = kMaxLevel - 1;
  ABSL_RAW_CHECK(level >= 1, "block not big enough for even one level");
  return level;
}
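// Illustrative sketch, not part of the original source: the level chosen
// above combines a deterministic floor (IntLog2 of the block size) with the
// geometric bonus from Random(), which returns k with probability 2^-k.  The
// result is the classic skiplist height distribution: roughly half of all
// nodes get the minimum level, a quarter one more, and so on.  The helper
// below is hypothetical and only demonstrates that distribution.
static void SkiplistLevelDistributionSketch() {
  uint32_t seed = 1;
  int histogram[9] = {0};
  for (int trial = 0; trial != 1 << 16; trial++) {
    int bonus = Random(&seed);  // geometric: P(bonus == k) == 2^-k, k >= 1
    if (bonus <= 8) histogram[bonus]++;
  }
  // Expect histogram[k] to be roughly (1 << 16) >> k for each k >= 1.
}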
// Return the first element of the list at or after e (by address); fill in
// prev[i] with the last element before e at level i.
static AllocList *LLA_SkiplistSearch(AllocList *head, AllocList *e,
                                     AllocList **prev) {
  AllocList *p = head;
  for (int level = head->levels - 1; level >= 0; level--) {
    for (AllocList *n; (n = p->next[level]) != nullptr && n < e; p = n) {
    }
    prev[level] = p;
  }
  return (head->levels == 0) ? nullptr : prev[0]->next[0];
}
// Insert element *e into AllocList *head.  Sets prev[] as LLA_SkiplistSearch.
// Requires that e->levels be previously set by the caller (using
// LLA_SkiplistLevels()).
static void LLA_SkiplistInsert(AllocList *head, AllocList *e,
                               AllocList **prev) {
  LLA_SkiplistSearch(head, e, prev);
  for (; head->levels < e->levels; head->levels++) {
    prev[head->levels] = head;  // don't leave prev[] uninitialized
  }
  for (int i = 0; i != e->levels; i++) {
    e->next[i] = prev[i]->next[i];
    prev[i]->next[i] = e;
  }
}
// Remove element *e from AllocList *head.  Sets prev[] as LLA_SkiplistSearch.
// Requires that e->levels be previously set by the caller (using
// LLA_SkiplistLevels()).
static void LLA_SkiplistDelete(AllocList *head, AllocList *e,
                               AllocList **prev) {
  AllocList *found = LLA_SkiplistSearch(head, e, prev);
  ABSL_RAW_CHECK(e == found, "element not in freelist");
  for (int i = 0; i != e->levels && prev[i]->next[i] == e; i++) {
    prev[i]->next[i] = e->next[i];
  }
  while (head->levels > 0 && head->next[head->levels - 1] == nullptr) {
    head->levels--;  // reduce head->levels if highest level is now empty
  }
}
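// Illustrative sketch, not part of the original source: the three
// LLA_Skiplist* helpers above form a tiny intrusive, address-ordered skiplist
// API.  The hypothetical helper below shows a typical round trip, assuming
// e->levels was already set with LLA_SkiplistLevels():
static void SkiplistRoundTripSketch(AllocList *head, AllocList *e) {
  AllocList *prev[kMaxLevel];
  LLA_SkiplistInsert(head, e, prev);  // link e at each of its levels
  ABSL_RAW_CHECK(LLA_SkiplistSearch(head, e, prev) == e,
                 "an inserted block must be findable");
  LLA_SkiplistDelete(head, e, prev);  // unlink it again
}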
// The representation of an arena.
struct LowLevelAlloc::Arena {
  // Constructs an arena with the given flag bits.
  explicit Arena(uint32_t flags_value);

  base_internal::SpinLock mu;               // protects freelist and counters
  AllocList freelist GUARDED_BY(mu);        // head of free list, address order
  int32_t allocation_count GUARDED_BY(mu);  // currently outstanding blocks
  const uint32_t flags;                     // flags passed to NewArena()
  const size_t pagesize;                    // == GetPageSize()
  const size_t roundup;                     // == RoundedUpBlockSize()
  const size_t min_size;                    // smallest allocation block size
  uint32_t random GUARDED_BY(mu);           // PRNG state for skiplist levels
};
ArenaStorage default_arena_storage;
ArenaStorage unhooked_arena_storage;
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
ArenaStorage unhooked_async_sig_safe_arena_storage;
#endif
absl::once_flag create_globals_once;

// Constructs the global arenas in their static storage; run exactly once via
// LowLevelCallOnce.
void CreateGlobalArenas() {
  new (&default_arena_storage)
      LowLevelAlloc::Arena(LowLevelAlloc::kCallMallocHook);
  new (&unhooked_arena_storage) LowLevelAlloc::Arena(0);
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
  new (&unhooked_async_sig_safe_arena_storage)
      LowLevelAlloc::Arena(LowLevelAlloc::kAsyncSignalSafe);
#endif
}
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
// Returns a global arena that is async-signal-safe and does not invoke
// malloc hooks.
LowLevelAlloc::Arena *UnhookedAsyncSigSafeArena() {
  base_internal::LowLevelCallOnce(&create_globals_once, CreateGlobalArenas);
  return reinterpret_cast<LowLevelAlloc::Arena *>(
      &unhooked_async_sig_safe_arena_storage);
}
#endif
// An ArenaLock maintains a lock on an arena.  When the arena was created
// with kAsyncSignalSafe, the constructor also blocks all signals, saving the
// old signal mask so Leave() can restore it.
class ArenaLock {
 public:
  // ... constructor and destructor elided in this excerpt ...

  void Leave() UNLOCK_FUNCTION() {
    arena_->mu.Unlock();
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
    if (mask_valid_) {
      const int err = pthread_sigmask(SIG_SETMASK, &mask_, nullptr);
      if (err != 0) {
        ABSL_RAW_LOG(FATAL, "pthread_sigmask failed: %d", err);
      }
    }
#endif
    left_ = true;
  }

 private:
  bool left_ = false;  // whether Leave() has been called
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
  bool mask_valid_ = false;
  sigset_t mask_;  // old mask of blocked signals
#endif
  LowLevelAlloc::Arena *arena_;

  ArenaLock(const ArenaLock &) = delete;
  ArenaLock &operator=(const ArenaLock &) = delete;
};
// The magic stored in a block header mixes in the header's own address, so a
// header that is corrupted or copied to another address fails verification.
inline static uintptr_t Magic(uintptr_t magic, AllocList::Header *ptr) {
  return magic ^ reinterpret_cast<uintptr_t>(ptr);
}
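// Illustrative sketch, not part of the original source: the checks scattered
// through this file re-derive the expected magic value from the header's
// current address, as this hypothetical helper makes explicit.
static bool HeaderLooksAllocatedSketch(AllocList::Header *h) {
  // Only a live allocated header stores the value derived from &*h itself.
  return h->magic == Magic(kMagicAllocated, h);
}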
size_t GetPageSize() {
#ifdef _WIN32
  SYSTEM_INFO system_info;
  GetSystemInfo(&system_info);
  return std::max(system_info.dwPageSize, system_info.dwAllocationGranularity);
#elif defined(__wasm__) || defined(__asmjs__)
  return getpagesize();
#else
  return sysconf(_SC_PAGESIZE);
#endif
}
size_t RoundedUpBlockSize() {
  // Round up block sizes to a power of two close to the header size.
  size_t roundup = 16;
  while (roundup < sizeof(AllocList::Header)) {
    roundup += roundup;
  }
  return roundup;
}
LowLevelAlloc::Arena::Arena(uint32_t flags_value)
    : mu(base_internal::SCHEDULE_KERNEL_ONLY),
      allocation_count(0),
      flags(flags_value),
      pagesize(GetPageSize()),
      roundup(RoundedUpBlockSize()),
      min_size(2 * roundup),
      random(0) {
  freelist.header.size = 0;
  freelist.header.magic = Magic(kMagicUnallocated, &freelist.header);
  freelist.header.arena = this;
  freelist.levels = 0;
  memset(freelist.next, 0, sizeof(freelist.next));
}
  // Inside LowLevelAlloc::NewArena(): select the arena that will hold the
  // new arena's own metadata.  meta_data_arena defaults to DefaultArena().
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
  if ((flags & LowLevelAlloc::kAsyncSignalSafe) != 0) {
    meta_data_arena = UnhookedAsyncSigSafeArena();
  } else  // NOLINT(readability/braces)
#endif
      if ((flags & LowLevelAlloc::kCallMallocHook) == 0) {
    meta_data_arena = UnhookedArena();
  }
bool LowLevelAlloc::DeleteArena(Arena *arena) {
  ABSL_RAW_CHECK(
      arena != nullptr && arena != DefaultArena() && arena != UnhookedArena(),
      "may not delete default arena");
  ArenaLock section(arena);
  if (arena->allocation_count != 0) {
    section.Leave();
    return false;  // allocations still outstanding; refuse to delete
  }
  while (arena->freelist.next[0] != nullptr) {
    AllocList *region = arena->freelist.next[0];
    size_t size = region->header.size;
    arena->freelist.next[0] = region->next[0];
    ABSL_RAW_CHECK(
        region->header.magic == Magic(kMagicUnallocated, &region->header),
        "bad magic number in DeleteArena()");
    ABSL_RAW_CHECK(region->header.arena == arena,
                   "bad arena pointer in DeleteArena()");
    ABSL_RAW_CHECK(size % arena->pagesize == 0,
                   "empty arena has non-page-aligned block size");
    ABSL_RAW_CHECK(reinterpret_cast<uintptr_t>(region) % arena->pagesize == 0,
                   "empty arena has non-page-aligned block");
    int munmap_result;
#ifdef _WIN32
    munmap_result = VirtualFree(region, 0, MEM_RELEASE);
    ABSL_RAW_CHECK(munmap_result != 0,
                   "LowLevelAlloc::DeleteArena: VirtualFree failed");
#else
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
    if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) == 0) {
      munmap_result = munmap(region, size);
    } else {
      munmap_result = base_internal::DirectMunmap(region, size);
    }
#else
    munmap_result = munmap(region, size);
#endif  // ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
    if (munmap_result != 0) {
      ABSL_RAW_LOG(FATAL, "LowLevelAlloc::DeleteArena: munmap failed: %d",
                   errno);
    }
#endif  // _WIN32
  }
  section.Leave();
  arena->~Arena();
  Free(arena);
  return true;
}
// Adds a to b, checking for unsigned overflow.
static inline uintptr_t CheckedAdd(uintptr_t a, uintptr_t b) {
  uintptr_t sum = a + b;
  ABSL_RAW_CHECK(sum >= a, "LowLevelAlloc arithmetic overflow");
  return sum;
}

// Return addr rounded up to the next multiple of align.
// align must be a power of two.
static inline uintptr_t RoundUp(uintptr_t addr, uintptr_t align) {
  return CheckedAdd(addr, align - 1) & ~(align - 1);
}
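// Illustrative sketch, not part of the original source: RoundUp() bumps an
// address to a power-of-two boundary, and CheckedAdd() makes the bump
// overflow-safe.  For example, RoundUp(37, 16) == (37 + 15) & ~15 == 48,
// while an already-aligned value is unchanged.  A hypothetical spot check:
static void RoundUpSketch() {
  ABSL_RAW_CHECK(RoundUp(37, 16) == 48, "rounds up to the next multiple");
  ABSL_RAW_CHECK(RoundUp(48, 16) == 48, "aligned values are unchanged");
}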
// Return the next free block at level i after prev, validating its header.
static AllocList *Next(int i, AllocList *prev, LowLevelAlloc::Arena *arena) {
  ABSL_RAW_CHECK(i < prev->levels, "too few levels in Next()");
  AllocList *next = prev->next[i];
  if (next != nullptr) {
    ABSL_RAW_CHECK(
        next->header.magic == Magic(kMagicUnallocated, &next->header),
        "bad magic number in Next()");
    ABSL_RAW_CHECK(next->header.arena == arena, "bad arena pointer in Next()");
    if (prev != &arena->freelist) {
      ABSL_RAW_CHECK(prev < next, "unordered freelist");
      ABSL_RAW_CHECK(reinterpret_cast<char *>(prev) + prev->header.size <
                         reinterpret_cast<char *>(next),
                     "malformed freelist");
    }
  }
  return next;
}
// Coalesce block "a" with its successor in the freelist if they are adjacent
// in memory, re-inserting the merged block at its new level.
static void Coalesce(AllocList *a) {
  AllocList *n = a->next[0];
  if (n != nullptr && reinterpret_cast<char *>(a) + a->header.size ==
                          reinterpret_cast<char *>(n)) {
    LowLevelAlloc::Arena *arena = a->header.arena;
    a->header.size += n->header.size;
    n->header.magic = 0;  // mark the absorbed header as dead
    n->header.arena = nullptr;
    AllocList *prev[kMaxLevel];
    LLA_SkiplistDelete(&arena->freelist, n, prev);
    LLA_SkiplistDelete(&arena->freelist, a, prev);
    a->levels =
        LLA_SkiplistLevels(a->header.size, arena->min_size, &arena->random);
    LLA_SkiplistInsert(&arena->freelist, a, prev);
  }
}
// Adds the block at user pointer "v" to the free list.
// Requires that arena->mu be held.
static void AddToFreelist(void *v, LowLevelAlloc::Arena *arena) {
  AllocList *f = reinterpret_cast<AllocList *>(reinterpret_cast<char *>(v) -
                                               sizeof(f->header));
  ABSL_RAW_CHECK(f->header.magic == Magic(kMagicAllocated, &f->header),
                 "bad magic number in AddToFreelist()");
  ABSL_RAW_CHECK(f->header.arena == arena,
                 "bad arena pointer in AddToFreelist()");
  f->levels =
      LLA_SkiplistLevels(f->header.size, arena->min_size, &arena->random);
  AllocList *prev[kMaxLevel];
  LLA_SkiplistInsert(&arena->freelist, f, prev);
  f->header.magic = Magic(kMagicUnallocated, &f->header);
  Coalesce(f);        // maybe coalesce with successor
  Coalesce(prev[0]);  // maybe coalesce with predecessor
}
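// Illustrative sketch, not part of the original source: AddToFreelist() and
// Free() rely on every user pointer being immediately preceded by its Header:
//
//   | Header { size, magic, arena } | user bytes ...               |
//   ^-- f (AllocList *)             ^-- v == f + sizeof(f->header)
//
// so reinterpret_cast<char *>(v) - sizeof(f->header) recovers the block's
// metadata from the pointer the caller holds.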
void LowLevelAlloc::Free(void *v) {
  if (v != nullptr) {
    AllocList *f = reinterpret_cast<AllocList *>(reinterpret_cast<char *>(v) -
                                                 sizeof(f->header));
    ABSL_RAW_CHECK(f->header.magic == Magic(kMagicAllocated, &f->header),
                   "bad magic number in Free()");
    LowLevelAlloc::Arena *arena = f->header.arena;
    ArenaLock section(arena);
    AddToFreelist(v, arena);
    ABSL_RAW_CHECK(arena->allocation_count > 0, "nothing in arena to free");
    arena->allocation_count--;
    section.Leave();
  }
}
// The actual allocator: returns memory for `request` bytes from `arena`.
static void *DoAllocWithArena(size_t request, LowLevelAlloc::Arena *arena) {
  void *result = nullptr;
  if (request != 0) {
    AllocList *s;  // will point to region that satisfies request
    ArenaLock section(arena);
    // round up with header
    size_t req_rnd =
        RoundUp(CheckedAdd(request, sizeof(s->header)), arena->roundup);
    for (;;) {  // loop until we find a suitable region
      // find the minimum levels that a block of this size must have
      int i = LLA_SkiplistLevels(req_rnd, arena->min_size, nullptr) - 1;
      if (i < arena->freelist.levels) {        // potential blocks exist
        AllocList *before = &arena->freelist;  // predecessor of s
        while ((s = Next(i, before, arena)) != nullptr &&
               s->header.size < req_rnd) {
          before = s;
        }
        if (s != nullptr) {  // we found a region
          break;
        }
      }
      // No fit; grab more memory from the OS.  We unlock before the system
      // call both because it may invoke a callback hook and because it may
      // be slow.
      arena->mu.Unlock();
      // mmap generous round-up to reduce frequency of mmap/munmap
      size_t new_pages_size = RoundUp(req_rnd, arena->pagesize * 16);
      void *new_pages;
#ifdef _WIN32
      new_pages = VirtualAlloc(0, new_pages_size,
                               MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE);
      ABSL_RAW_CHECK(new_pages != nullptr, "VirtualAlloc failed");
#else
#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
      if ((arena->flags & LowLevelAlloc::kAsyncSignalSafe) != 0) {
        new_pages = base_internal::DirectMmap(nullptr, new_pages_size,
            PROT_WRITE | PROT_READ, MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
      } else {
        new_pages = mmap(nullptr, new_pages_size, PROT_WRITE | PROT_READ,
                         MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
      }
#else
      new_pages = mmap(nullptr, new_pages_size, PROT_WRITE | PROT_READ,
                       MAP_ANONYMOUS | MAP_PRIVATE, -1, 0);
#endif  // ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
      if (new_pages == MAP_FAILED) {
        ABSL_RAW_LOG(FATAL, "mmap error: %d", errno);
      }
#endif  // _WIN32
      arena->mu.Lock();
      s = reinterpret_cast<AllocList *>(new_pages);
      s->header.size = new_pages_size;
      // Pretend the block is allocated; call AddToFreelist() to free it.
      s->header.magic = Magic(kMagicAllocated, &s->header);
      s->header.arena = arena;
      AddToFreelist(&s->levels, arena);  // insert new region into free list
    }
    AllocList *prev[kMaxLevel];
    LLA_SkiplistDelete(&arena->freelist, s, prev);  // remove from free list
    // s points to the first free region that's big enough
    if (CheckedAdd(req_rnd, arena->min_size) <= s->header.size) {
      // big enough to split: carve off the tail as a new free block
      AllocList *n =
          reinterpret_cast<AllocList *>(req_rnd + reinterpret_cast<char *>(s));
      n->header.size = s->header.size - req_rnd;
      n->header.magic = Magic(kMagicAllocated, &n->header);
      n->header.arena = arena;
      s->header.size = req_rnd;
      AddToFreelist(&n->levels, arena);
    }
    s->header.magic = Magic(kMagicAllocated, &s->header);
    ABSL_RAW_CHECK(s->header.arena == arena, "");
    arena->allocation_count++;
    section.Leave();
    result = &s->levels;
  }
  ANNOTATE_MEMORY_IS_UNINITIALIZED(result, request);
  return result;
}
}  // namespace base_internal
// ...

#endif  // ABSL_LOW_LEVEL_ALLOC_MISSING
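// Illustrative sketch, not part of the original source: typical use of the
// public interface declared in low_level_alloc.h.  DeleteArena() refuses to
// destroy an arena (and returns false) while any allocation from it is live.
//
//   using absl::base_internal::LowLevelAlloc;
//   LowLevelAlloc::Arena *arena = LowLevelAlloc::NewArena(0);
//   void *p = LowLevelAlloc::AllocWithArena(128, arena);
//   ABSL_RAW_CHECK(!LowLevelAlloc::DeleteArena(arena), "p is still live");
//   LowLevelAlloc::Free(p);
//   ABSL_RAW_CHECK(LowLevelAlloc::DeleteArena(arena), "arena now empty");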