malloc: Improve checked_request2size

Change checked_request2size to return SIZE_MAX for huge inputs.  This
ensures large allocation requests stay large and can't be confused with a
small allocation.  As a result several existing checks against PTRDIFF_MAX
become redundant.

Reviewed-by: DJ Delorie <dj@redhat.com>
Author: Wilco Dijkstra
Date:   2025-06-06 17:11:36 +00:00
parent 21fda179c2
commit b68b125ad1
2 changed files with 13 additions and 28 deletions
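
The contract change can be illustrated with a standalone mock (hypothetical
code for illustration only; old_checked_request2size and
new_checked_request2size are made-up names, and the real helper additionally
pads the request to a usable chunk size, as in malloc/malloc.c below):

    #include <stddef.h>
    #include <stdint.h>

    /* Old contract: a huge request collapsed to 0, a "small" value, so
       every caller had to test the result for 0 before using it.  */
    static size_t
    old_checked_request2size (size_t req)
    {
      return req > PTRDIFF_MAX ? 0 : req;
    }

    /* New contract: a huge request stays huge.  SIZE_MAX fails any later
       "size < limit" comparison, so a missing error check can no longer
       turn an oversized request into a small allocation.  */
    static size_t
    new_checked_request2size (size_t req)
    {
      return req > PTRDIFF_MAX ? SIZE_MAX : req;
    }

    int
    main (void)
    {
      /* For an oversized request, 0 looks tiny; SIZE_MAX stays oversized.  */
      return (old_checked_request2size (SIZE_MAX) == 0
              && new_checked_request2size (SIZE_MAX) == SIZE_MAX) ? 0 : 1;
    }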

malloc/malloc-check.c

@@ -275,12 +275,12 @@ realloc_check (void *oldmem, size_t bytes)
     malloc_printerr ("realloc(): invalid pointer");
 
   const INTERNAL_SIZE_T oldsize = chunksize (oldp);
 
-  chnb = checked_request2size (rb);
-  if (chnb == 0)
+  if (rb > PTRDIFF_MAX)
     {
       __set_errno (ENOMEM);
       goto invert;
     }
+  chnb = checked_request2size (rb);
 
   __libc_lock_lock (main_arena.mutex);

malloc/malloc.c

@@ -1323,8 +1323,8 @@ nextchunk-> +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
 
 /* Check if REQ overflows when padded and aligned and if the resulting
    value is less than PTRDIFF_T.  Returns the requested size or
-   MINSIZE in case the value is less than MINSIZE, or 0 if any of the
-   previous checks fail.  */
+   MINSIZE in case the value is less than MINSIZE, or SIZE_MAX if any
+   of the previous checks fail.  */
 static __always_inline size_t
 checked_request2size (size_t req) __nonnull (1)
 {
@@ -1332,7 +1332,7 @@ checked_request2size (size_t req) __nonnull (1)
                   "PTRDIFF_MAX is not more than half of SIZE_MAX");
 
   if (__glibc_unlikely (req > PTRDIFF_MAX))
-    return 0;
+    return SIZE_MAX;
 
   /* When using tagged memory, we cannot share the end of the user
      block with the header for the next chunk, so ensure that we
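
For context, the body elided from this hunk does the actual padding; a
simplified sketch of the whole helper after the change (the memory-tagging
rounding is omitted, and HDR_SZ, ALIGN_MASK, MIN_CHUNK and
sketch_checked_request2size are illustrative stand-ins for glibc's SIZE_SZ,
MALLOC_ALIGN_MASK, MINSIZE and the request2size macro):

    #include <stddef.h>
    #include <stdint.h>

    #define HDR_SZ     (sizeof (size_t))     /* chunk header overhead */
    #define ALIGN_MASK (2 * HDR_SZ - 1)      /* malloc alignment - 1 */
    #define MIN_CHUNK  (4 * HDR_SZ)          /* smallest valid chunk size */

    static inline size_t
    sketch_checked_request2size (size_t req)
    {
      /* Huge requests now stay huge: SIZE_MAX cannot pass any later
         "nb < limit" test, unlike the old sentinel 0.  */
      if (req > PTRDIFF_MAX)
        return SIZE_MAX;

      /* Pad with the header and round up to the alignment.  Because
         req <= PTRDIFF_MAX <= SIZE_MAX / 2 (the static assert above),
         the addition cannot overflow.  */
      size_t nb = (req + HDR_SZ + ALIGN_MASK) & ~ALIGN_MASK;
      return nb < MIN_CHUNK ? MIN_CHUNK : nb;
    }
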
@@ -3462,11 +3462,6 @@ __libc_malloc (size_t bytes)
 {
 #if USE_TCACHE
   size_t nb = checked_request2size (bytes);
-  if (nb == 0)
-    {
-      __set_errno (ENOMEM);
-      return NULL;
-    }
 
   if (nb < mp_.tcache_max_bytes)
     {
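
The deleted error check is safe to drop here because of the new contract: an
oversized request now makes nb equal to SIZE_MAX, which can never satisfy the
tcache bound, so the request simply bypasses the fast path and reaches
_int_malloc (further down in this diff), which sets ENOMEM.  The fast path,
annotated:

    size_t nb = checked_request2size (bytes); /* SIZE_MAX if bytes > PTRDIFF_MAX */

    if (nb < mp_.tcache_max_bytes)            /* always false for SIZE_MAX */
      {
        /* tcache fast path */
      }
    /* Oversized requests fall through to the slow path, where _int_malloc
       performs the explicit PTRDIFF_MAX check and sets ENOMEM.  */
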
@@ -3611,12 +3606,12 @@ __libc_realloc (void *oldmem, size_t bytes)
       || misaligned_chunk (oldp)))
     malloc_printerr ("realloc(): invalid pointer");
 
-  nb = checked_request2size (bytes);
-  if (nb == 0)
+  if (bytes > PTRDIFF_MAX)
     {
       __set_errno (ENOMEM);
       return NULL;
     }
+  nb = checked_request2size (bytes);
 
   if (chunk_is_mmapped (oldp))
     {
@@ -3742,13 +3737,7 @@ _mid_memalign (size_t alignment, size_t bytes)
     }
 
 #if USE_TCACHE
-  size_t nb = checked_request2size (bytes);
-  if (nb == 0)
-    {
-      __set_errno (ENOMEM);
-      return NULL;
-    }
-  void *victim = tcache_get_align (nb, alignment);
+  void *victim = tcache_get_align (checked_request2size (bytes), alignment);
   if (victim != NULL)
     return tag_new_usable (victim);
 #endif
@@ -3909,11 +3898,7 @@ __libc_calloc (size_t n, size_t elem_size)
 
 #if USE_TCACHE
   size_t nb = checked_request2size (bytes);
-  if (nb == 0)
-    {
-      __set_errno (ENOMEM);
-      return NULL;
-    }
+
   if (nb < mp_.tcache_max_bytes)
     {
       if (__glibc_unlikely (tcache == NULL))
@@ -3988,12 +3973,12 @@ _int_malloc (mstate av, size_t bytes)
      aligned.
    */
 
-  nb = checked_request2size (bytes);
-  if (nb == 0)
+  if (bytes > PTRDIFF_MAX)
     {
       __set_errno (ENOMEM);
       return NULL;
     }
+  nb = checked_request2size (bytes);
 
   /* There are no usable arenas.  Fall back to sysmalloc to get a chunk from
      mmap.  */
@@ -5148,12 +5133,12 @@ _int_memalign (mstate av, size_t alignment, size_t bytes)
   unsigned long remainder_size;   /* its size */
   INTERNAL_SIZE_T size;
 
-  nb = checked_request2size (bytes);
-  if (nb == 0)
+  if (bytes > PTRDIFF_MAX)
     {
       __set_errno (ENOMEM);
       return NULL;
     }
+  nb = checked_request2size (bytes);
 
   /* We can't check tcache here because we hold the arena lock, which
      tcache doesn't expect.  We expect it has been checked