Use UINTPTR_MAX not SIZE_MAX
Signed-off-by: Dave Rodgman <dave.rodgman@arm.com>
@@ -63,8 +63,9 @@
  * only used here.
  */
 #if defined(MBEDTLS_EFFICIENT_UNALIGNED_ACCESS) && defined(MBEDTLS_HAVE_ASM)
-#if ((defined(__arm__) || defined(__thumb__) || defined(__thumb2__)) && (SIZE_MAX == 0xffffffff)) || \
-    (defined(__aarch64__) && ((SIZE_MAX == 0xffffffff) || (SIZE_MAX == 0xffffffffffffffff)))
+#if ((defined(__arm__) || defined(__thumb__) || defined(__thumb2__)) && (UINTPTR_MAX == 0xfffffffful)) || \
+    (defined(__aarch64__) && ((UINTPTR_MAX == 0xffffffffull) || (UINTPTR_MAX == 0xffffffffffffffffull)))
+/* We check pointer sizes to avoid issues with them not matching register size requirements */
 #define MBEDTLS_EFFICIENT_UNALIGNED_VOLATILE_ACCESS
 #endif
 #endif
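The added comment about pointer sizes is the crux of this change: SIZE_MAX describes size_t (the largest object size), while UINTPTR_MAX describes uintptr_t, an integer type wide enough to hold a pointer, which is what has to match the register operands used by the inline asm below. A minimal, hypothetical sketch (not part of the patch) that prints both limits for the current target:

#include <stdint.h>
#include <stdio.h>

int main(void)
{
    /* On most targets these agree, but size_t only has to span object sizes,
     * while uintptr_t has to be able to hold a pointer value, so UINTPTR_MAX
     * is the better proxy for pointer width (e.g. on an ILP32 ABI running on
     * a 64-bit core). */
    printf("SIZE_MAX       = 0x%jx\n", (uintmax_t) SIZE_MAX);
    printf("UINTPTR_MAX    = 0x%jx\n", (uintmax_t) UINTPTR_MAX);
    printf("sizeof(void *) = %zu\n", sizeof(void *));
    return 0;
}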
@@ -80,10 +81,11 @@ static inline uint32_t mbedtls_get_unaligned_volatile_uint32(volatile const unsi
 #if defined(__arm__) || defined(__thumb__) || defined(__thumb2__)
     asm volatile ("ldr %0, [%1]" : "=r" (r) : "r" (p) :);
 #elif defined(__aarch64__)
-#if (SIZE_MAX == 0xffffffff)
+#if (UINTPTR_MAX == 0xfffffffful)
     /* ILP32: Specify the pointer operand slightly differently, as per #7787. */
     asm volatile ("ldr %w0, [%1]" : "=r" (r) : "p" (p) :);
-#else
+#elif (UINTPTR_MAX == 0xffffffffffffffffull)
+    /* aarch64 with 64-bit pointers */
     asm volatile ("ldr %w0, [%1]" : "=r" (r) : "r" (p) :);
 #endif
 #endif
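For context, a minimal usage sketch of the helper this hunk touches, assuming library/alignment.h is the header that defines mbedtls_get_unaligned_volatile_uint32(); the wrapper name and the include path are illustrative, not part of the patch:

#include <stddef.h>
#include <stdint.h>

#include "alignment.h"   /* assumed location of the helper (library/alignment.h) */

/* Hypothetical wrapper: volatile 32-bit load from a possibly unaligned offset.
 * On Arm targets with MBEDTLS_EFFICIENT_UNALIGNED_VOLATILE_ACCESS, the helper
 * expands to one of the ldr variants selected by the UINTPTR_MAX checks above. */
static uint32_t read_word(const unsigned char *buf, size_t offset)
{
    return mbedtls_get_unaligned_volatile_uint32(buf + offset);
}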