protobuf/src/google/protobuf/arena_impl.h
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// This file defines an Arena allocator for better allocation performance.

#ifndef GOOGLE_PROTOBUF_ARENA_IMPL_H__
#define GOOGLE_PROTOBUF_ARENA_IMPL_H__

#include <atomic>
#include <limits>
#include <typeinfo>

#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>

#ifdef ADDRESS_SANITIZER
#include <sanitizer/asan_interface.h>
#endif  // ADDRESS_SANITIZER

#include <google/protobuf/port_def.inc>

namespace google {
namespace protobuf {
namespace internal {

inline constexpr size_t AlignUpTo8(size_t n) {
  // Align n to next multiple of 8 (from Hacker's Delight, Chapter 3.)
  return (n + 7) & static_cast<size_t>(-8);
}
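// For orientation (illustrative values, not part of the original source):
// AlignUpTo8(0) == 0, AlignUpTo8(1) == 8, AlignUpTo8(8) == 8, and
// AlignUpTo8(9) == 16. Adding 7 and masking with ~7 (which is what
// static_cast<size_t>(-8) evaluates to) rounds up without a branch.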

using LifecycleIdAtomic = uint64_t;

// MetricsCollector collects stats for a particular arena.
class PROTOBUF_EXPORT ArenaMetricsCollector {
 public:
  ArenaMetricsCollector(bool record_allocs) : record_allocs_(record_allocs) {}

  // Invoked when the arena is about to be destroyed. This method will
  // typically finalize any metric collection and delete the collector.
  // space_allocated is the space used by the arena.
  virtual void OnDestroy(uint64_t space_allocated) = 0;

  // OnReset() is called when the associated arena is reset.
  // space_allocated is the space used by the arena just before the reset.
  virtual void OnReset(uint64_t space_allocated) = 0;

  // OnAlloc is called when an allocation happens.
  // type_info is promised to be static - its lifetime extends to match the
  // program's lifetime (it is obtained from the typeid operator).
  // Note: typeid(void) will be passed as allocated_type every time we
  // intentionally want to avoid monitoring an allocation (i.e. internal
  // allocations for managing the arena).
  virtual void OnAlloc(const std::type_info* allocated_type,
                       uint64_t alloc_size) = 0;

  // Does OnAlloc() need to be called? If false, metric collection overhead
  // will be reduced since we will not do extra work per allocation.
  bool RecordAllocs() { return record_allocs_; }

 protected:
  // This class is destructed by the call to OnDestroy().
  ~ArenaMetricsCollector() = default;
  const bool record_allocs_;
};
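// Illustrative sketch only (not part of this header): a minimal collector
// that tallies allocated bytes could be derived roughly as follows, assuming
// it is heap-allocated and self-deletes in OnDestroy().
//
//   class CountingCollector : public ArenaMetricsCollector {
//    public:
//     CountingCollector() : ArenaMetricsCollector(/*record_allocs=*/true) {}
//     void OnDestroy(uint64_t space_allocated) override { delete this; }
//     void OnReset(uint64_t space_allocated) override {}
//     void OnAlloc(const std::type_info* /*allocated_type*/,
//                  uint64_t alloc_size) override {
//       bytes_allocated_ += alloc_size;
//     }
//    private:
//     uint64_t bytes_allocated_ = 0;
//   };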

struct AllocationPolicy {
  static constexpr size_t kDefaultStartBlockSize = 256;
  static constexpr size_t kDefaultMaxBlockSize = 8192;

  size_t start_block_size = kDefaultStartBlockSize;
  size_t max_block_size = kDefaultMaxBlockSize;
  void* (*block_alloc)(size_t) = nullptr;
  void (*block_dealloc)(void*, size_t) = nullptr;
  ArenaMetricsCollector* metrics_collector = nullptr;

  bool IsDefault() const {
    return start_block_size == kDefaultStartBlockSize &&
           max_block_size == kDefaultMaxBlockSize && block_alloc == nullptr &&
           block_dealloc == nullptr && metrics_collector == nullptr;
  }
};
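// Illustrative sketch only: a policy that starts with larger blocks and routes
// block allocation through ::operator new/delete (all values assumed, not
// defaults of this library).
//
//   AllocationPolicy policy;
//   policy.start_block_size = 4096;
//   policy.max_block_size = 64 << 10;
//   policy.block_alloc = [](size_t n) { return ::operator new(n); };
//   policy.block_dealloc = [](void* p, size_t) { ::operator delete(p); };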

// Tagged pointer to an AllocationPolicy.
class TaggedAllocationPolicyPtr {
 public:
  constexpr TaggedAllocationPolicyPtr() : policy_(0) {}

  explicit TaggedAllocationPolicyPtr(AllocationPolicy* policy)
      : policy_(reinterpret_cast<uintptr_t>(policy)) {}

  void set_policy(AllocationPolicy* policy) {
    auto bits = policy_ & kTagsMask;
    policy_ = reinterpret_cast<uintptr_t>(policy) | bits;
  }

  AllocationPolicy* get() {
    return reinterpret_cast<AllocationPolicy*>(policy_ & kPtrMask);
  }
  const AllocationPolicy* get() const {
    return reinterpret_cast<const AllocationPolicy*>(policy_ & kPtrMask);
  }

  AllocationPolicy& operator*() { return *get(); }
  const AllocationPolicy& operator*() const { return *get(); }

  AllocationPolicy* operator->() { return get(); }
  const AllocationPolicy* operator->() const { return get(); }

  bool is_user_owned_initial_block() const {
    return static_cast<bool>(get_mask<kUserOwnedInitialBlock>());
  }
  void set_is_user_owned_initial_block(bool v) {
    set_mask<kUserOwnedInitialBlock>(v);
  }

  bool should_record_allocs() const {
    return static_cast<bool>(get_mask<kRecordAllocs>());
  }
  void set_should_record_allocs(bool v) { set_mask<kRecordAllocs>(v); }

  uintptr_t get_raw() const { return policy_; }

  inline void RecordAlloc(const std::type_info* allocated_type,
                          size_t n) const {
    get()->metrics_collector->OnAlloc(allocated_type, n);
  }

 private:
  enum : uintptr_t {
    kUserOwnedInitialBlock = 1,
    kRecordAllocs = 2,
  };

  static constexpr uintptr_t kTagsMask = 7;
  static constexpr uintptr_t kPtrMask = ~kTagsMask;

  template <uintptr_t kMask>
  uintptr_t get_mask() const {
    return policy_ & kMask;
  }
  template <uintptr_t kMask>
  void set_mask(bool v) {
    if (v) {
      policy_ |= kMask;
    } else {
      policy_ &= ~kMask;
    }
  }

  uintptr_t policy_;
};
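// How the tag bits work (informal sketch; the alignment claim is an assumption
// about how AllocationPolicy objects are allocated): because the stored
// pointer is at least 8-byte aligned, its low bits are always zero and can
// carry flags. kUserOwnedInitialBlock (0x1) and kRecordAllocs (0x2) live
// there; get() strips them with kPtrMask and set_policy() preserves them.
//
//   TaggedAllocationPolicyPtr p(policy);  // 'policy' assumed 8-byte aligned
//   p.set_should_record_allocs(true);
//   // p.get() == policy, while
//   // p.get_raw() == reinterpret_cast<uintptr_t>(policy) | kRecordAllocs.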

// A simple arena allocator. Calls to allocate functions must be properly
// serialized by the caller, hence this class cannot be used as a general
// purpose allocator in a multi-threaded program. It serves as a building block
// for ThreadSafeArena, which provides a thread-safe arena allocator.
//
// This class manages
// 1) Arena bump allocation + owning memory blocks.
// 2) Maintaining a cleanup list.
// It delegates the actual memory allocation back to ThreadSafeArena, which
// knows the block growth policy and the backing memory allocator in use.
class PROTOBUF_EXPORT SerialArena {
 public:
  struct Memory {
    void* ptr;
    size_t size;
  };

  // Node contains the ptr of the object to be cleaned up and the associated
  // cleanup function ptr.
  struct CleanupNode {
    void* elem;              // Pointer to the object to be cleaned up.
    void (*cleanup)(void*);  // Function pointer to the destructor or deleter.
  };

  void CleanupList();
  uint64_t SpaceAllocated() const {
    return space_allocated_.load(std::memory_order_relaxed);
  }
  uint64_t SpaceUsed() const;

  bool HasSpace(size_t n) { return n <= static_cast<size_t>(limit_ - ptr_); }

  void* AllocateAligned(size_t n, const AllocationPolicy* policy) {
    GOOGLE_DCHECK_EQ(internal::AlignUpTo8(n), n);  // Must be already aligned.
    GOOGLE_DCHECK_GE(limit_, ptr_);
    if (PROTOBUF_PREDICT_FALSE(!HasSpace(n))) {
      return AllocateAlignedFallback(n, policy);
    }
    return AllocateFromExisting(n);
  }

 private:
  void* AllocateFromExisting(size_t n) {
    void* ret = ptr_;
    ptr_ += n;
#ifdef ADDRESS_SANITIZER
    ASAN_UNPOISON_MEMORY_REGION(ret, n);
#endif  // ADDRESS_SANITIZER
    return ret;
  }

 public:
  // Allocate space if the current region provides enough space.
  bool MaybeAllocateAligned(size_t n, void** out) {
    GOOGLE_DCHECK_EQ(internal::AlignUpTo8(n), n);  // Must be already aligned.
    GOOGLE_DCHECK_GE(limit_, ptr_);
    if (PROTOBUF_PREDICT_FALSE(!HasSpace(n))) return false;
    *out = AllocateFromExisting(n);
    return true;
  }

  std::pair<void*, CleanupNode*> AllocateAlignedWithCleanup(
      size_t n, const AllocationPolicy* policy) {
    GOOGLE_DCHECK_EQ(internal::AlignUpTo8(n), n);  // Must be already aligned.
    if (PROTOBUF_PREDICT_FALSE(!HasSpace(n + kCleanupSize))) {
      return AllocateAlignedWithCleanupFallback(n, policy);
    }
    return AllocateFromExistingWithCleanupFallback(n);
  }

 private:
  std::pair<void*, CleanupNode*> AllocateFromExistingWithCleanupFallback(
      size_t n) {
    void* ret = ptr_;
    ptr_ += n;
    limit_ -= kCleanupSize;
#ifdef ADDRESS_SANITIZER
    ASAN_UNPOISON_MEMORY_REGION(ret, n);
    ASAN_UNPOISON_MEMORY_REGION(limit_, kCleanupSize);
#endif  // ADDRESS_SANITIZER
    return CreatePair(ret, reinterpret_cast<CleanupNode*>(limit_));
  }

 public:
  void AddCleanup(void* elem, void (*cleanup)(void*),
                  const AllocationPolicy* policy) {
    auto res = AllocateAlignedWithCleanup(0, policy);
    res.second->elem = elem;
    res.second->cleanup = cleanup;
  }

  void* owner() const { return owner_; }
  SerialArena* next() const { return next_; }
  void set_next(SerialArena* next) { next_ = next; }

 private:
  friend class ThreadSafeArena;
  friend class ArenaBenchmark;

  // Creates a new SerialArena inside mem, using the remaining memory for
  // future allocations.
  static SerialArena* New(SerialArena::Memory mem, void* owner);
  // Free SerialArena, returning the memory passed in to New.
  template <typename Deallocator>
  Memory Free(Deallocator deallocator);

  // Blocks are variable length malloc-ed objects. The following structure
  // describes the common header for all blocks.
  struct Block {
    Block(Block* next, size_t size) : next(next), size(size), start(nullptr) {}

    char* Pointer(size_t n) {
      GOOGLE_DCHECK(n <= size);
      return reinterpret_cast<char*>(this) + n;
    }

    Block* const next;
    const size_t size;
    CleanupNode* start;
    // data follows
  };

  void* owner_;            // &ThreadCache of this thread.
  Block* head_;            // Head of linked list of blocks.
  SerialArena* next_;      // Next SerialArena in this linked list.
  size_t space_used_ = 0;  // Necessary for metrics.
  std::atomic<size_t> space_allocated_;

  // Next pointer to allocate from. Always 8-byte aligned. Points inside
  // head_ (and head_->pos will always be non-canonical). We keep these
  // here to reduce indirection.
  char* ptr_;
  char* limit_;

  // Constructor is private as only New() should be used.
  inline SerialArena(Block* b, void* owner);
  void* AllocateAlignedFallback(size_t n, const AllocationPolicy* policy);
  std::pair<void*, CleanupNode*> AllocateAlignedWithCleanupFallback(
      size_t n, const AllocationPolicy* policy);
  void AllocateNewBlock(size_t n, const AllocationPolicy* policy);

  std::pair<void*, CleanupNode*> CreatePair(void* ptr, CleanupNode* node) {
    return {ptr, node};
  }

 public:
  static constexpr size_t kBlockHeaderSize = AlignUpTo8(sizeof(Block));
  static constexpr size_t kCleanupSize = AlignUpTo8(sizeof(CleanupNode));
};
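// Rough picture of a block while in use (illustrative, not normative): objects
// are bump-allocated upward from ptr_, while CleanupNode records are carved
// downward from limit_, so the free region shrinks from both ends until the
// two pointers meet and a new block is needed.
//
//   [ Block header | obj | obj |   ...free...   | CleanupNode | CleanupNode ]
//                               ^ ptr_          ^ limit_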

// Tag type used to invoke the constructor of message-owned arena.
// Only message-owned arenas use this constructor for creation.
// Such constructors are internal implementation details of the library.
struct MessageOwned {
  explicit MessageOwned() = default;
};

// This class provides the core Arena memory allocation library. Different
// implementations only need to implement the public interface below.
// Arena is not a template type because that would only be useful if all protos
// in turn were templates, which will not / cannot happen. However, by
// separating the memory allocation part from the cruft of the API users
// expect, we can use #ifdef to select the best implementation based on
// hardware / OS.
class PROTOBUF_EXPORT ThreadSafeArena {
 public:
  ThreadSafeArena() { Init(); }

  // Constructor solely used by message-owned arena.
  ThreadSafeArena(internal::MessageOwned) : tag_and_id_(kMessageOwnedArena) {
    Init();
  }

  ThreadSafeArena(char* mem, size_t size) { InitializeFrom(mem, size); }

  explicit ThreadSafeArena(void* mem, size_t size,
                           const AllocationPolicy& policy) {
    InitializeWithPolicy(mem, size, policy);
  }

  // Destructor deletes all owned heap allocated objects, and destructs objects
  // that have non-trivial destructors, except for proto2 message objects whose
  // destructors can be skipped. Also, frees all blocks except the initial
  // block if it was passed in.
  ~ThreadSafeArena();

  uint64_t Reset();

  uint64_t SpaceAllocated() const;
  uint64_t SpaceUsed() const;

  void* AllocateAligned(size_t n, const std::type_info* type) {
    SerialArena* arena;
    if (PROTOBUF_PREDICT_TRUE(!alloc_policy_.should_record_allocs() &&
                              GetSerialArenaFast(&arena))) {
      return arena->AllocateAligned(n, AllocPolicy());
    } else {
      return AllocateAlignedFallback(n, type);
    }
  }

  // This function allocates n bytes if the common happy case is true and
  // returns true. Otherwise it does nothing and returns false. These strange
  // semantics are necessary to allow callers to program functions that only
  // have fallback function calls in tail position. This substantially improves
  // code for the happy path.
  PROTOBUF_NDEBUG_INLINE bool MaybeAllocateAligned(size_t n, void** out) {
    SerialArena* a;
    if (PROTOBUF_PREDICT_TRUE(!alloc_policy_.should_record_allocs() &&
                              GetSerialArenaFromThreadCache(&a))) {
      return a->MaybeAllocateAligned(n, out);
    }
    return false;
  }
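  // Intended caller pattern (sketch with hypothetical names, not from this
  // header): keeping the slow path as a separate tail call keeps the inlined
  // fast path small.
  //
  //   void* AllocatePod(ThreadSafeArena* arena, size_t n) {
  //     void* mem;
  //     if (arena->MaybeAllocateAligned(internal::AlignUpTo8(n), &mem)) {
  //       return mem;
  //     }
  //     return AllocatePodFallback(arena, n);  // hypothetical slow path
  //   }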

  std::pair<void*, SerialArena::CleanupNode*> AllocateAlignedWithCleanup(
      size_t n, const std::type_info* type);

  // Add object pointer and cleanup function pointer to the list.
  void AddCleanup(void* elem, void (*cleanup)(void*));

  // Checks whether this arena is message-owned.
  PROTOBUF_ALWAYS_INLINE bool IsMessageOwned() const {
    return tag_and_id_ & kMessageOwnedArena;
  }

 private:
  // Unique for each arena. Changes on Reset().
  uint64_t tag_and_id_ = 0;
  // The LSB of tag_and_id_ indicates if the arena is message-owned.
  enum : uint64_t { kMessageOwnedArena = 1 };

  TaggedAllocationPolicyPtr alloc_policy_;  // Tagged pointer to AllocPolicy.

  // Pointer to a linked list of SerialArena.
  std::atomic<SerialArena*> threads_;
  std::atomic<SerialArena*> hint_;  // Fast thread-local block access.

  const AllocationPolicy* AllocPolicy() const { return alloc_policy_.get(); }
  void InitializeFrom(void* mem, size_t size);
  void InitializeWithPolicy(void* mem, size_t size, AllocationPolicy policy);
  void* AllocateAlignedFallback(size_t n, const std::type_info* type);
  std::pair<void*, SerialArena::CleanupNode*>
  AllocateAlignedWithCleanupFallback(size_t n, const std::type_info* type);

  void Init();
  void SetInitialBlock(void* mem, size_t size);

  // Delete or Destruct all objects owned by the arena.
  void CleanupList();

  inline uint64_t LifeCycleId() const {
    return tag_and_id_ & ~kMessageOwnedArena;
  }

  inline void CacheSerialArena(SerialArena* serial) {
    thread_cache().last_serial_arena = serial;
    thread_cache().last_lifecycle_id_seen = tag_and_id_;
    // TODO(haberman): evaluate whether we would gain efficiency by getting rid
    // of hint_. It's the only write we do to ThreadSafeArena in the allocation
    // path, which will dirty the cache line.

    hint_.store(serial, std::memory_order_release);
  }

  PROTOBUF_NDEBUG_INLINE bool GetSerialArenaFast(SerialArena** arena) {
    if (GetSerialArenaFromThreadCache(arena)) return true;

    // Check whether we own the last accessed SerialArena on this arena. This
    // fast path optimizes the case where a single thread uses multiple arenas.
    ThreadCache* tc = &thread_cache();
    SerialArena* serial = hint_.load(std::memory_order_acquire);
    if (PROTOBUF_PREDICT_TRUE(serial != NULL && serial->owner() == tc)) {
      *arena = serial;
      return true;
    }
    return false;
  }

  PROTOBUF_NDEBUG_INLINE bool GetSerialArenaFromThreadCache(
      SerialArena** arena) {
    // If this thread already owns a block in this arena then try to use that.
    // This fast path optimizes the case where multiple threads allocate from
    // the same arena.
    ThreadCache* tc = &thread_cache();
    if (PROTOBUF_PREDICT_TRUE(tc->last_lifecycle_id_seen == tag_and_id_)) {
      *arena = tc->last_serial_arena;
      return true;
    }
    return false;
  }
  SerialArena* GetSerialArenaFallback(void* me);

  template <typename Functor>
  void PerSerialArena(Functor fn) {
    // By omitting an acquire barrier we ensure that any user code that doesn't
    // properly synchronize Reset() or the destructor will throw a TSAN
    // warning.
    SerialArena* serial = threads_.load(std::memory_order_relaxed);

    for (; serial; serial = serial->next()) fn(serial);
  }

  // Releases all memory except the first block, which it returns. The first
  // block might be owned by the user and thus needs some extra checks before
  // deleting.
  SerialArena::Memory Free(size_t* space_allocated);

#ifdef _MSC_VER
#pragma warning(disable : 4324)
#endif
  struct alignas(64) ThreadCache {
#if defined(GOOGLE_PROTOBUF_NO_THREADLOCAL)
    // If we are using the ThreadLocalStorage class to store the ThreadCache,
    // then the ThreadCache's default constructor has to be responsible for
    // initializing it.
    ThreadCache()
        : next_lifecycle_id(0),
          last_lifecycle_id_seen(-1),
          last_serial_arena(NULL) {}
#endif

    // Number of per-thread lifecycle IDs to reserve. Must be power of two.
    // To reduce contention on a global atomic, each thread reserves a batch of
    // IDs. The following number is calculated based on a stress test with
    // ~6500 threads all frequently allocating a new arena.
    static constexpr size_t kPerThreadIds = 256;
    // Next lifecycle ID available to this thread. We need to reserve a new
    // batch, if `next_lifecycle_id & (kPerThreadIds - 1) == 0`.
    uint64_t next_lifecycle_id;
    // The ThreadCache is considered valid as long as this matches the
    // lifecycle_id of the arena being used.
    uint64_t last_lifecycle_id_seen;
    SerialArena* last_serial_arena;
  };

  // The lifecycle id can be a highly contended variable when many arenas are
  // created. Make sure that other global variables are not sharing its cache
  // line.
#ifdef _MSC_VER
#pragma warning(disable : 4324)
#endif
  struct alignas(64) CacheAlignedLifecycleIdGenerator {
    std::atomic<LifecycleIdAtomic> id;
  };
  static CacheAlignedLifecycleIdGenerator lifecycle_id_generator_;
#if defined(GOOGLE_PROTOBUF_NO_THREADLOCAL)
  // iOS does not support the __thread keyword, so we use a custom thread local
  // storage class we implemented.
  static ThreadCache& thread_cache();
#elif defined(PROTOBUF_USE_DLLS)
  // Thread local variables cannot be exposed through a DLL interface, but we
  // can wrap them in static functions.
  static ThreadCache& thread_cache();
#else
  static PROTOBUF_THREAD_LOCAL ThreadCache thread_cache_;
  static ThreadCache& thread_cache() { return thread_cache_; }
#endif

  GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ThreadSafeArena);
  // All protos have pointers back to the arena, hence Arena must have
  // pointer stability.
  ThreadSafeArena(ThreadSafeArena&&) = delete;
  ThreadSafeArena& operator=(ThreadSafeArena&&) = delete;

 public:
  // kBlockHeaderSize is sizeof(Block), aligned up to the nearest multiple of 8
  // to protect the invariant that pos is always at a multiple of 8.
  static constexpr size_t kBlockHeaderSize = SerialArena::kBlockHeaderSize;
  static constexpr size_t kSerialArenaSize =
      (sizeof(SerialArena) + 7) & static_cast<size_t>(-8);
  static_assert(kBlockHeaderSize % 8 == 0,
                "kBlockHeaderSize must be a multiple of 8.");
  static_assert(kSerialArenaSize % 8 == 0,
                "kSerialArenaSize must be a multiple of 8.");
};
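// Rough end-to-end sketch of how these internals fit together (hypothetical
// snippet; MyPod and the exact call sequence are illustrative, and application
// code should go through google::protobuf::Arena instead):
//
//   internal::ThreadSafeArena arena;
//   // Allocate 8-byte-aligned storage; typeid(void) means "do not monitor".
//   void* mem = arena.AllocateAligned(internal::AlignUpTo8(sizeof(MyPod)),
//                                     &typeid(void));
//   MyPod* pod = new (mem) MyPod();
//   // Register the destructor so Reset()/~ThreadSafeArena() run it.
//   arena.AddCleanup(pod, [](void* p) { static_cast<MyPod*>(p)->~MyPod(); });
//   arena.Reset();  // runs cleanups and reclaims blocks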

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#include <google/protobuf/port_undef.inc>

#endif  // GOOGLE_PROTOBUF_ARENA_IMPL_H__