mirror of https://sourceware.org/git/glibc.git
Add single-threaded path to malloc/realloc/calloc/memalign
This patch adds a single-threaded fast path to malloc, realloc, calloc and memalign. When we're single-threaded, we can bypass arena_get (which always locks the arena it returns) and use the main arena directly. There is also no need to retry a different arena, since the main arena is the only one.

	* malloc/malloc.c (__libc_malloc): Add SINGLE_THREAD_P path.
	(__libc_realloc): Likewise.
	(_mid_memalign): Likewise.
	(__libc_calloc): Likewise.
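The shape of the fast path is easiest to see outside of glibc. Below is a minimal, self-contained sketch of the same idea: when no second thread has ever been created, the allocator can skip the arena lock and return early. The names (single_thread_p, alloc_from_arena, my_malloc) and the toy bump-pointer arena are illustrative stand-ins, not glibc code; glibc's real SINGLE_THREAD_P macro on most targets reads a multiple-threads flag from the thread control block.

/* Sketch only -- stand-ins for glibc internals, not glibc code.  */
#include <pthread.h>
#include <stddef.h>

static int single_thread_p = 1;  /* would be cleared once a second
                                    thread is created */
static pthread_mutex_t arena_lock = PTHREAD_MUTEX_INITIALIZER;

static char heap[1 << 20];       /* toy arena */
static size_t heap_used;

static void *
alloc_from_arena (size_t bytes)  /* stands in for _int_malloc */
{
  if (bytes > sizeof heap)                    /* also rejects overflow */
    return NULL;
  bytes = (bytes + 15) & ~(size_t) 15;        /* keep 16-byte alignment */
  if (bytes > sizeof heap - heap_used)
    return NULL;
  void *p = heap + heap_used;
  heap_used += bytes;
  return p;
}

void *
my_malloc (size_t bytes)
{
  if (single_thread_p)
    /* Fast path: no other thread exists, so the arena cannot be
       touched concurrently and no lock is needed.  */
    return alloc_from_arena (bytes);

  pthread_mutex_lock (&arena_lock);
  void *p = alloc_from_arena (bytes);
  pthread_mutex_unlock (&arena_lock);
  return p;
}

The malloc, realloc and memalign hunks below all follow this early-return shape: allocate from the main arena, assert the chunk is mmapped or really belongs to the main arena, and return before arena_get would have taken the lock.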
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,10 @@
+2017-10-23  Wilco Dijkstra  <wdijkstr@arm.com>
+
+	* malloc/malloc.c (__libc_malloc): Add SINGLE_THREAD_P path.
+	(__libc_realloc): Likewise.
+	(_mid_memalign): Likewise.
+	(__libc_calloc): Likewise.
+
 2017-10-23  Mike FABIAN  <mfabian@redhat.com>
 
 	* localedata/locales/tpi_PG (LC_MESSAGES): Fix yesexpr and noexpr
--- a/malloc/malloc.c
+++ b/malloc/malloc.c
@@ -3038,6 +3038,14 @@ __libc_malloc (size_t bytes)
   DIAG_POP_NEEDS_COMMENT;
 #endif
 
+  if (SINGLE_THREAD_P)
+    {
+      victim = _int_malloc (&main_arena, bytes);
+      assert (!victim || chunk_is_mmapped (mem2chunk (victim)) ||
+              &main_arena == arena_for_chunk (mem2chunk (victim)));
+      return victim;
+    }
+
   arena_get (ar_ptr, bytes);
 
   victim = _int_malloc (ar_ptr, bytes);
@@ -3194,6 +3202,15 @@ __libc_realloc (void *oldmem, size_t bytes)
       return newmem;
     }
 
+  if (SINGLE_THREAD_P)
+    {
+      newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
+      assert (!newp || chunk_is_mmapped (mem2chunk (newp)) ||
+              ar_ptr == arena_for_chunk (mem2chunk (newp)));
+
+      return newp;
+    }
+
   __libc_lock_lock (ar_ptr->mutex);
 
   newp = _int_realloc (ar_ptr, oldp, oldsize, nb);
@@ -3269,6 +3286,15 @@ _mid_memalign (size_t alignment, size_t bytes, void *address)
       alignment = a;
     }
 
+  if (SINGLE_THREAD_P)
+    {
+      p = _int_memalign (&main_arena, alignment, bytes);
+      assert (!p || chunk_is_mmapped (mem2chunk (p)) ||
+              &main_arena == arena_for_chunk (mem2chunk (p)));
+
+      return p;
+    }
+
   arena_get (ar_ptr, bytes + alignment + MINSIZE);
 
   p = _int_memalign (ar_ptr, alignment, bytes);
@@ -3361,7 +3387,11 @@ __libc_calloc (size_t n, size_t elem_size)
 
   MAYBE_INIT_TCACHE ();
 
-  arena_get (av, sz);
+  if (SINGLE_THREAD_P)
+    av = &main_arena;
+  else
+    arena_get (av, sz);
+
   if (av)
     {
       /* Check if we hand out the top chunk, in which case there may be no
@@ -3391,19 +3421,21 @@ __libc_calloc (size_t n, size_t elem_size)
     }
   mem = _int_malloc (av, sz);
 
-
   assert (!mem || chunk_is_mmapped (mem2chunk (mem)) ||
           av == arena_for_chunk (mem2chunk (mem)));
 
-  if (mem == 0 && av != NULL)
+  if (!SINGLE_THREAD_P)
     {
-      LIBC_PROBE (memory_calloc_retry, 1, sz);
-      av = arena_get_retry (av, sz);
-      mem = _int_malloc (av, sz);
+      if (mem == 0 && av != NULL)
+        {
+          LIBC_PROBE (memory_calloc_retry, 1, sz);
+          av = arena_get_retry (av, sz);
+          mem = _int_malloc (av, sz);
+        }
+
+      if (av != NULL)
+        __libc_lock_unlock (av->mutex);
     }
 
-  if (av != NULL)
-    __libc_lock_unlock (av->mutex);
-
   /* Allocation failed even after a retry.  */
   if (mem == 0)
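__libc_calloc takes a different shape, presumably because the code that runs after the allocation (the top-chunk bookkeeping and the memory clearing) is shared by both cases, so an early return is not possible. Instead, the arena is chosen up front without locking, and the retry and unlock steps are guarded by !SINGLE_THREAD_P. A self-contained sketch of that pattern follows; my_calloc, toy_arena and single_thread_p are invented stand-ins, not glibc code.

/* Sketch of the calloc-shaped variant -- illustrative, not glibc code.  */
#include <pthread.h>
#include <stdlib.h>
#include <string.h>

struct toy_arena { pthread_mutex_t lock; };

static struct toy_arena toy_main_arena = { PTHREAD_MUTEX_INITIALIZER };
static int single_thread_p = 1;   /* stand-in for SINGLE_THREAD_P */

void *
my_calloc (size_t n, size_t elem_size)
{
  size_t sz;
  if (__builtin_mul_overflow (n, elem_size, &sz))
    return NULL;                        /* n * elem_size overflowed */

  struct toy_arena *av;
  if (single_thread_p)
    av = &toy_main_arena;               /* no lock taken */
  else
    {
      /* arena_get would select an arena and return it locked.  */
      av = &toy_main_arena;
      pthread_mutex_lock (&av->lock);
    }

  void *mem = malloc (sz);              /* stands in for _int_malloc */

  if (!single_thread_p)
    {
      /* In glibc, the arena_get_retry fallback also lives inside
         this guarded block.  */
      pthread_mutex_unlock (&av->lock); /* only unlock what was locked */
    }

  /* The clearing code is shared by both paths, which is why calloc
     selects the arena up front instead of returning early.  */
  return mem == NULL ? NULL : memset (mem, 0, sz);
}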