malloc: Improve malloc initialization

Move malloc initialization to __libc_early_init. Use a hidden
__ptmalloc_init for initialization and a weak call to avoid pulling in
the system malloc in a static binary. All previous initialization
checks can now be removed.

Reviewed-by: Florian Weimer <fweimer@redhat.com>
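The "weak call" relies on the linker's handling of weak undefined symbols: if no object file in the link defines the symbol, the reference resolves to the null address instead of failing, and the call can be skipped at run time. Below is a minimal sketch of the idiom (example_init is a hypothetical stand-in for __ptmalloc_init; glibc wraps this pattern in the internal call_function_static_weak macro used in the diff):

#include <stdio.h>

/* Weak declaration: the linker resolves this to NULL if no object
   file in the link provides a definition.  */
extern void example_init (void) __attribute__ ((weak));

int
main (void)
{
  if (example_init != NULL)   /* Was an initializer linked in?  */
    example_init ();          /* Yes: run it.  */
  else
    puts ("example_init not linked; nothing to initialize");
  return 0;
}

In a static binary that never references malloc, nothing references __ptmalloc_init either, so the allocator's object files are not dragged in from the archive just to satisfy this call.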
--- a/elf/libc_early_init.c
+++ b/elf/libc_early_init.c
@@ -24,6 +24,7 @@
 #include <pthread_early_init.h>
 #include <sys/single_threaded.h>
 #include <getrandom-internal.h>
+#include <malloc/malloc-internal.h>
 
 #ifdef SHARED
 _Bool __libc_initial;
@@ -32,6 +33,9 @@ _Bool __libc_initial;
 void
 __libc_early_init (_Bool initial)
 {
+  /* Initialize system malloc.  */
+  call_function_static_weak (__ptmalloc_init);
+
   /* Initialize ctype data.  */
   __ctype_init ();
 
--- a/malloc/arena.c
+++ b/malloc/arena.c
@@ -113,9 +113,6 @@ static mstate free_list;
    acquired.  */
 __libc_lock_define_initialized (static, list_lock);
 
-/* Already initialized? */
-static bool __malloc_initialized = false;
-
 /**************************************************************************/
 
 
@@ -168,9 +165,6 @@ arena_for_chunk (mchunkptr ptr)
 void
 __malloc_fork_lock_parent (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   /* We do not acquire free_list_lock here because we completely
      reconstruct free_list in __malloc_fork_unlock_child.  */
 
@@ -188,9 +182,6 @@ __malloc_fork_lock_parent (void)
 void
 __malloc_fork_unlock_parent (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   for (mstate ar_ptr = &main_arena;; )
     {
       __libc_lock_unlock (ar_ptr->mutex);
@@ -204,9 +195,6 @@ __malloc_fork_unlock_parent (void)
 void
 __malloc_fork_unlock_child (void)
 {
-  if (!__malloc_initialized)
-    return;
-
   /* Push all arenas to the free list, except thread_arena, which is
      attached to the current thread.  */
   __libc_lock_init (free_list_lock);
@@ -259,14 +247,9 @@ TUNABLE_CALLBACK_FNDECL (set_hugetlb, size_t)
 static void tcache_key_initialize (void);
 #endif
 
-static void
-ptmalloc_init (void)
+void
+__ptmalloc_init (void)
 {
-  if (__malloc_initialized)
-    return;
-
-  __malloc_initialized = true;
-
 #if USE_TCACHE
   tcache_key_initialize ();
 #endif
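This hunk is the heart of the change: the static, lazily guarded ptmalloc_init becomes the externally callable __ptmalloc_init, and the __malloc_initialized guard disappears because __libc_early_init runs it exactly once before any user code. A simplified before/after sketch of the two initialization styles (hypothetical names, not the actual glibc code):

#include <stdbool.h>
#include <stddef.h>

static bool initialized;

static void init_state (void) { /* ... set up allocator state ... */ }

/* Before: lazy initialization, re-checked in every entry point.  */
void *
lazy_alloc (size_t n)
{
  if (!initialized)
    {
      initialized = true;
      init_state ();
    }
  return NULL;   /* Placeholder for the real allocation path.  */
}

/* After: init_state is guaranteed to have run once, early, so the
   entry point carries no check at all.  */
void *
eager_alloc (size_t n)
{
  return NULL;   /* Placeholder for the real allocation path.  */
}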
--- a/malloc/malloc-check.c
+++ b/malloc/malloc-check.c
@@ -389,7 +389,7 @@ initialize_malloc_check (void)
 {
   /* This is the copy of the malloc initializer that we pulled in along with
      malloc-check.  This does not affect any of the libc malloc structures.  */
-  ptmalloc_init ();
+  __ptmalloc_init ();
   TUNABLE_GET (check, int32_t, TUNABLE_CALLBACK (set_mallopt_check));
   return __is_malloc_debug_enabled (MALLOC_CHECK_HOOK);
 }
--- a/malloc/malloc-internal.h
+++ b/malloc/malloc-internal.h
@@ -40,4 +40,7 @@ void __malloc_arena_thread_freeres (void) attribute_hidden;
 /* Activate a standard set of debugging hooks. */
 void __malloc_check_init (void) attribute_hidden;
 
+/* Initialize malloc.  */
+void __ptmalloc_init (void) attribute_hidden;
+
 #endif /* _MALLOC_INTERNAL_H */
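The new declaration carries glibc's attribute_hidden, which roughly expands to hidden ELF visibility: __ptmalloc_init is callable from anywhere inside libc, but the symbol is not exported from the shared library and calls to it bind directly rather than through the PLT. A sketch of the plain-GCC equivalent (macro and function names hypothetical):

/* Approximately what glibc's attribute_hidden provides.  */
#define my_attribute_hidden __attribute__ ((visibility ("hidden")))

/* Hypothetical internal initializer: visible across the library's
   own object files, invisible to outside linkers.  */
void my_internal_init (void) my_attribute_hidden;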
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -1937,7 +1937,7 @@ static struct malloc_par mp_ =
 /*
    Initialize a malloc_state struct.
 
-   This is called from ptmalloc_init () or from _int_new_arena ()
+   This is called from __ptmalloc_init () or from _int_new_arena ()
    when creating a new arena.
  */
 
@@ -3347,9 +3347,6 @@ __libc_malloc2 (size_t bytes)
   mstate ar_ptr;
   void *victim;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   MAYBE_INIT_TCACHE ();
 
   if (SINGLE_THREAD_P)
@@ -3455,9 +3452,6 @@ __libc_realloc (void *oldmem, size_t bytes)
 
   void *newp;             /* chunk to return */
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
 #if REALLOC_ZERO_BYTES_FREES
   if (bytes == 0 && oldmem != NULL)
     {
@@ -3583,9 +3577,6 @@ libc_hidden_def (__libc_realloc)
 void *
 __libc_memalign (size_t alignment, size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   return _mid_memalign (alignment, bytes, address);
 }
@@ -3596,9 +3587,6 @@ void *
 weak_function
 aligned_alloc (size_t alignment, size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   /* Similar to memalign, but starting with ISO C17 the standard
      requires an error for alignments that are not supported by the
      implementation.  Valid alignments for the current implementation
@@ -3698,9 +3686,6 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
 void *
 __libc_valloc (size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   size_t pagesize = GLRO (dl_pagesize);
   return _mid_memalign (pagesize, bytes, address);
@@ -3709,9 +3694,6 @@ __libc_valloc (size_t bytes)
 void *
 __libc_pvalloc (size_t bytes)
 {
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   void *address = RETURN_ADDRESS (0);
   size_t pagesize = GLRO (dl_pagesize);
   size_t rounded_bytes;
@@ -3746,9 +3728,6 @@ __libc_calloc (size_t n, size_t elem_size)
 
   sz = bytes;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
 #if USE_TCACHE
   size_t tc_idx = usize2tidx (bytes);
   if (tcache_available (tc_idx))
@@ -5211,9 +5190,6 @@ __malloc_trim (size_t s)
 {
   int result = 0;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   mstate ar_ptr = &main_arena;
   do
     {
@@ -5330,9 +5306,6 @@ __libc_mallinfo2 (void)
   struct mallinfo2 m;
   mstate ar_ptr;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   memset (&m, 0, sizeof (m));
   ar_ptr = &main_arena;
   do
@@ -5381,8 +5354,6 @@ __malloc_stats (void)
   mstate ar_ptr;
   unsigned int in_use_b = mp_.mmapped_mem, system_b = in_use_b;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
   _IO_flockfile (stderr);
   int old_flags2 = stderr->_flags2;
   stderr->_flags2 |= _IO_FLAGS2_NOTCANCEL;
@@ -5563,8 +5534,6 @@ __libc_mallopt (int param_number, int value)
   mstate av = &main_arena;
   int res = 1;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
   __libc_lock_lock (av->mutex);
 
   LIBC_PROBE (memory_mallopt, 2, param_number, value);
@@ -5780,11 +5749,14 @@ malloc_printerr (const char *str)
 }
 
 #if USE_TCACHE
+
+static volatile int dummy_var;
+
 static __attribute_noinline__ void
 malloc_printerr_tail (const char *str)
 {
   /* Ensure this cannot be a no-return function.  */
-  if (!__malloc_initialized)
+  if (dummy_var)
     return;
   malloc_printerr (str);
 }
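This hunk preserves a subtlety flagged by the source comment: with the initialization check gone, every path through malloc_printerr_tail would end in malloc_printerr, which never returns, and the compiler could then infer that malloc_printerr_tail itself never returns and apply noreturn-style codegen at its call sites. Reading a volatile that is always zero keeps a never-taken return path alive that the compiler cannot prove away. A minimal sketch of the trick (names hypothetical):

#include <stdio.h>
#include <stdlib.h>

static volatile int dummy_var;   /* Never set; always reads as 0.  */

static void
fatal_error (const char *msg)
{
  fprintf (stderr, "fatal: %s\n", msg);
  abort ();   /* Does not return.  */
}

static __attribute__ ((noinline)) void
report_tail (const char *msg)
{
  /* The compiler must assume dummy_var could be nonzero, so it
     cannot prove this function never returns.  */
  if (dummy_var)
    return;
  fatal_error (msg);
}

int
main (void)
{
  report_tail ("demonstration");   /* Aborts the process.  */
}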
@@ -5797,9 +5769,6 @@ __posix_memalign (void **memptr, size_t alignment, size_t size)
 {
   void *mem;
 
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   /* Test whether the SIZE argument is valid.  It must be a power of
      two multiple of sizeof (void *).  */
   if (alignment % sizeof (void *) != 0
@@ -5840,11 +5809,6 @@ __malloc_info (int options, FILE *fp)
   size_t total_aspace = 0;
   size_t total_aspace_mprotect = 0;
 
-
-
-  if (!__malloc_initialized)
-    ptmalloc_init ();
-
   fputs ("<malloc version=\"1\">\n", fp);
 
   /* Iterate over all arenas currently in use.  */