Move SigSafeArena() out of symbolize_elf.inc and expose it via absl/base/internal/low_level_alloc.h

This allows stack-trace computation code to perform dynamic memory allocation from a shared async-signal-safe arena, avoiding stack overflows from large on-stack buffers.

PiperOrigin-RevId: 786512779
Change-Id: Ib5ef8fef436672b99d9678137e3b2bb65ca47eba
diff --git a/absl/base/internal/low_level_alloc.cc b/absl/base/internal/low_level_alloc.cc
index 2dd3604..0afd2f9 100644
--- a/absl/base/internal/low_level_alloc.cc
+++ b/absl/base/internal/low_level_alloc.cc
@@ -19,6 +19,8 @@
 
 #include "absl/base/internal/low_level_alloc.h"
 
+#include <stdint.h>
+
 #include <optional>
 #include <type_traits>
 
@@ -220,6 +222,32 @@
   uint32_t random ABSL_GUARDED_BY(mu);
 };
 
+// ---------------------------------------------------------------
+// An async-signal-safe arena for LowLevelAlloc
+static std::atomic<base_internal::LowLevelAlloc::Arena *> g_sig_safe_arena;
+
+base_internal::LowLevelAlloc::Arena *SigSafeArena() {
+  return g_sig_safe_arena.load(std::memory_order_acquire);
+}
+
+void InitSigSafeArena() {
+  if (SigSafeArena() == nullptr) {
+    uint32_t flags = 0;
+#ifndef ABSL_LOW_LEVEL_ALLOC_ASYNC_SIGNAL_SAFE_MISSING
+    flags |= base_internal::LowLevelAlloc::kAsyncSignalSafe;
+#endif
+    base_internal::LowLevelAlloc::Arena *new_arena =
+        base_internal::LowLevelAlloc::NewArena(flags);
+    base_internal::LowLevelAlloc::Arena *old_value = nullptr;
+    if (!g_sig_safe_arena.compare_exchange_strong(old_value, new_arena,
+                                                  std::memory_order_release,
+                                                  std::memory_order_relaxed)) {
+      // We lost a race to allocate an arena; deallocate.
+      base_internal::LowLevelAlloc::DeleteArena(new_arena);
+    }
+  }
+}
+
 namespace {
 // Static storage space for the lazily-constructed, default global arena
 // instances.  We require this space because the whole point of LowLevelAlloc
diff --git a/absl/base/internal/low_level_alloc.h b/absl/base/internal/low_level_alloc.h
index c2f1f25..23218dd 100644
--- a/absl/base/internal/low_level_alloc.h
+++ b/absl/base/internal/low_level_alloc.h
@@ -120,6 +120,12 @@
   LowLevelAlloc();      // no instances
 };
 
+// Returns a global async-signal-safe arena for LowLevelAlloc.
+LowLevelAlloc::Arena *SigSafeArena();
+
+// Ensures the global async-signal-safe arena for LowLevelAlloc is initialized.
+void InitSigSafeArena();
+
 }  // namespace base_internal
 ABSL_NAMESPACE_END
 }  // namespace absl
diff --git a/absl/debugging/symbolize_elf.inc b/absl/debugging/symbolize_elf.inc
index 17baff4..f635061 100644
--- a/absl/debugging/symbolize_elf.inc
+++ b/absl/debugging/symbolize_elf.inc
@@ -233,29 +233,6 @@
 };
 
 // ---------------------------------------------------------------
-// An async-signal-safe arena for LowLevelAlloc
-static std::atomic<base_internal::LowLevelAlloc::Arena *> g_sig_safe_arena;
-
-static base_internal::LowLevelAlloc::Arena *SigSafeArena() {
-  return g_sig_safe_arena.load(std::memory_order_acquire);
-}
-
-static void InitSigSafeArena() {
-  if (SigSafeArena() == nullptr) {
-    base_internal::LowLevelAlloc::Arena *new_arena =
-        base_internal::LowLevelAlloc::NewArena(
-            base_internal::LowLevelAlloc::kAsyncSignalSafe);
-    base_internal::LowLevelAlloc::Arena *old_value = nullptr;
-    if (!g_sig_safe_arena.compare_exchange_strong(old_value, new_arena,
-                                                  std::memory_order_release,
-                                                  std::memory_order_relaxed)) {
-      // We lost a race to allocate an arena; deallocate.
-      base_internal::LowLevelAlloc::DeleteArena(new_arena);
-    }
-  }
-}
-
-// ---------------------------------------------------------------
 // An AddrMap is a vector of ObjFile, using SigSafeArena() for allocation.
 
 class AddrMap {
@@ -287,7 +264,7 @@
     size_t new_allocated = allocated_ * 2 + 50;
     ObjFile *new_obj_ =
         static_cast<ObjFile *>(base_internal::LowLevelAlloc::AllocWithArena(
-            new_allocated * sizeof(*new_obj_), SigSafeArena()));
+            new_allocated * sizeof(*new_obj_), base_internal::SigSafeArena()));
     if (obj_) {
       memcpy(new_obj_, obj_, allocated_ * sizeof(*new_obj_));
       base_internal::LowLevelAlloc::Free(obj_);
@@ -335,8 +312,9 @@
  private:
   char *CopyString(const char *s) {
     size_t len = strlen(s);
-    char *dst = static_cast<char *>(
-        base_internal::LowLevelAlloc::AllocWithArena(len + 1, SigSafeArena()));
+    char *dst =
+        static_cast<char *>(base_internal::LowLevelAlloc::AllocWithArena(
+            len + 1, base_internal::SigSafeArena()));
     ABSL_RAW_CHECK(dst != nullptr, "out of memory");
     memcpy(dst, s, len + 1);
     return dst;
@@ -441,14 +419,14 @@
 // Return (and set null) g_cached_symbolized_state if it is not null.
 // Otherwise return a new symbolizer.
 static Symbolizer *AllocateSymbolizer() {
-  InitSigSafeArena();
+  base_internal::InitSigSafeArena();
   Symbolizer *symbolizer =
       g_cached_symbolizer.exchange(nullptr, std::memory_order_acquire);
   if (symbolizer != nullptr) {
     return symbolizer;
   }
   return new (base_internal::LowLevelAlloc::AllocWithArena(
-      SymbolizerSize(), SigSafeArena())) Symbolizer();
+      SymbolizerSize(), base_internal::SigSafeArena())) Symbolizer();
 }
 
 // Set g_cached_symbolize_state to s if it is null, otherwise
@@ -1678,7 +1656,7 @@
   SAFE_ASSERT(start <= end);
   SAFE_ASSERT(filename != nullptr);
 
-  InitSigSafeArena();
+  base_internal::InitSigSafeArena();
 
   if (!g_file_mapping_mu.TryLock()) {
     return false;
@@ -1690,8 +1668,9 @@
   } else {
     // TODO(ckennelly): Move this into a string copy routine.
     size_t len = strlen(filename);
-    char *dst = static_cast<char *>(
-        base_internal::LowLevelAlloc::AllocWithArena(len + 1, SigSafeArena()));
+    char *dst =
+        static_cast<char *>(base_internal::LowLevelAlloc::AllocWithArena(
+            len + 1, base_internal::SigSafeArena()));
     ABSL_RAW_CHECK(dst != nullptr, "out of memory");
     memcpy(dst, filename, len + 1);