1
0
mirror of https://sourceware.org/git/glibc.git synced 2025-10-12 19:04:54 +03:00

aarch64: use PTR_ARG and SIZE_ARG instead of DELOUSE

DELOUSE was added to asm code to make it compatible with non-LP64
ABIs, but it is an unfortunate name and the code was not compatible
with ABIs where pointer and size_t have different sizes. Glibc
currently only supports the LP64 ABI, so these macros are not really
needed or tested, but for now the name is changed to be more
meaningful instead of removing them completely.

Some DELOUSE macros were dropped: clone, strlen and strnlen used it
unnecessarily.

The out of tree ILP32 patches are currently not maintained and will
likely need a rework to rebase them on top of the time64 changes.
This commit is contained in:
Szabolcs Nagy
2020-12-17 10:03:05 +00:00
parent f9de8bfe1a
commit 45b1e17e91
28 changed files with 74 additions and 75 deletions

View File

@@ -46,7 +46,7 @@ ENTRY (__longjmp)
cfi_offset(d14, JB_D14<<3) cfi_offset(d14, JB_D14<<3)
cfi_offset(d15, JB_D15<<3) cfi_offset(d15, JB_D15<<3)
DELOUSE (0) PTR_ARG (0)
ldp x19, x20, [x0, #JB_X19<<3] ldp x19, x20, [x0, #JB_X19<<3]
ldp x21, x22, [x0, #JB_X21<<3] ldp x21, x22, [x0, #JB_X21<<3]

View File

@@ -75,7 +75,7 @@
.align 2 .align 2
_dl_tlsdesc_return: _dl_tlsdesc_return:
BTI_C BTI_C
DELOUSE (0) PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE] ldr PTR_REG (0), [x0, #PTR_SIZE]
RET RET
cfi_endproc cfi_endproc
@@ -99,7 +99,7 @@ _dl_tlsdesc_undefweak:
BTI_C BTI_C
str x1, [sp, #-16]! str x1, [sp, #-16]!
cfi_adjust_cfa_offset (16) cfi_adjust_cfa_offset (16)
DELOUSE (0) PTR_ARG (0)
ldr PTR_REG (0), [x0, #PTR_SIZE] ldr PTR_REG (0), [x0, #PTR_SIZE]
mrs x1, tpidr_el0 mrs x1, tpidr_el0
sub PTR_REG (0), PTR_REG (0), PTR_REG (1) sub PTR_REG (0), PTR_REG (0), PTR_REG (1)
@@ -145,7 +145,7 @@ _dl_tlsdesc_undefweak:
.align 2 .align 2
_dl_tlsdesc_dynamic: _dl_tlsdesc_dynamic:
BTI_C BTI_C
DELOUSE (0) PTR_ARG (0)
/* Save just enough registers to support fast path, if we fall /* Save just enough registers to support fast path, if we fall
into slow path we will save additional registers. */ into slow path we will save additional registers. */

View File

@@ -61,8 +61,8 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (MEMCHR) ENTRY (MEMCHR)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
bic src, srcin, 15 bic src, srcin, 15
cbz cntin, L(nomatch) cbz cntin, L(nomatch)
ld1 {vdata.16b}, [src] ld1 {vdata.16b}, [src]

View File

@@ -42,9 +42,9 @@
#define tmp2 x8 #define tmp2 x8
ENTRY_ALIGN (memcmp, 6) ENTRY_ALIGN (memcmp, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
subs limit, limit, 16 subs limit, limit, 16
b.lo L(less16) b.lo L(less16)

View File

@@ -73,9 +73,9 @@
*/ */
ENTRY_ALIGN (MEMCPY, 6) ENTRY_ALIGN (MEMCPY, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
add dstend, dstin, count add dstend, dstin, count
@@ -209,9 +209,9 @@ END (MEMCPY)
libc_hidden_builtin_def (MEMCPY) libc_hidden_builtin_def (MEMCPY)
ENTRY_ALIGN (MEMMOVE, 4) ENTRY_ALIGN (MEMMOVE, 4)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
add dstend, dstin, count add dstend, dstin, count

View File

@@ -59,8 +59,8 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__memrchr) ENTRY (__memrchr)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
add end, srcin, cntin add end, srcin, cntin
sub endm1, end, 1 sub endm1, end, 1
bic src, endm1, 15 bic src, endm1, 15

View File

@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6) ENTRY_ALIGN (MEMSET, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
dup v0.16B, valw dup v0.16B, valw
add dstend, dstin, count add dstend, dstin, count

View File

@@ -64,8 +64,8 @@
ENTRY_ALIGN (MEMCHR, 6) ENTRY_ALIGN (MEMCHR, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
/* Do not dereference srcin if no bytes to compare. */ /* Do not dereference srcin if no bytes to compare. */
cbz cntin, L(none_chr) cbz cntin, L(none_chr)

View File

@@ -64,9 +64,9 @@
from the end. */ from the end. */
ENTRY (__memcpy_simd) ENTRY (__memcpy_simd)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
add dstend, dstin, count add dstend, dstin, count
@@ -181,9 +181,9 @@ libc_hidden_builtin_def (__memcpy_simd)
ENTRY (__memmove_simd) ENTRY (__memmove_simd)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
add dstend, dstin, count add dstend, dstin, count

View File

@@ -73,9 +73,9 @@
#if IS_IN (libc) #if IS_IN (libc)
ENTRY_ALIGN (__memcpy_falkor, 6) ENTRY_ALIGN (__memcpy_falkor, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
cmp count, 32 cmp count, 32
add srcend, src, count add srcend, src, count
@@ -218,9 +218,9 @@ libc_hidden_builtin_def (__memcpy_falkor)
ENTRY_ALIGN (__memmove_falkor, 6) ENTRY_ALIGN (__memmove_falkor, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
cmp count, 32 cmp count, 32
add srcend, src, count add srcend, src, count

View File

@@ -81,9 +81,9 @@
ENTRY_ALIGN (MEMMOVE, 6) ENTRY_ALIGN (MEMMOVE, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
sub tmp1, dstin, src sub tmp1, dstin, src
cmp count, 96 cmp count, 96
@@ -95,9 +95,9 @@ END (MEMMOVE)
libc_hidden_builtin_def (MEMMOVE) libc_hidden_builtin_def (MEMMOVE)
ENTRY (MEMCPY) ENTRY (MEMCPY)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
prfm PLDL1KEEP, [src] prfm PLDL1KEEP, [src]
add srcend, src, count add srcend, src, count

View File

@@ -97,9 +97,9 @@
ENTRY_ALIGN (MEMMOVE, 6) ENTRY_ALIGN (MEMMOVE, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
cmp count, 16 cmp count, 16
@@ -127,9 +127,9 @@ libc_hidden_builtin_def (MEMMOVE)
.p2align 4 .p2align 4
ENTRY (MEMCPY) ENTRY (MEMCPY)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) SIZE_ARG (2)
add srcend, src, count add srcend, src, count
cmp count, 16 cmp count, 16

View File

@@ -36,8 +36,8 @@
ENTRY_ALIGN (MEMSET, 6) ENTRY_ALIGN (MEMSET, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
bfi valw, valw, 8, 8 bfi valw, valw, 8, 8
bfi valw, valw, 16, 16 bfi valw, valw, 16, 16

View File

@@ -31,8 +31,8 @@
ENTRY_ALIGN (MEMSET, 6) ENTRY_ALIGN (MEMSET, 6)
DELOUSE (0) PTR_ARG (0)
DELOUSE (2) SIZE_ARG (2)
dup v0.16B, valw dup v0.16B, valw
add dstend, dstin, count add dstend, dstin, count

View File

@@ -86,7 +86,7 @@
character, return the length, if not, continue in the main loop. */ character, return the length, if not, continue in the main loop. */
ENTRY (__strlen_asimd) ENTRY (__strlen_asimd)
DELOUSE (0) PTR_ARG (0)
and tmp1, srcin, MIN_PAGE_SIZE - 1 and tmp1, srcin, MIN_PAGE_SIZE - 1
cmp tmp1, MIN_PAGE_SIZE - 32 cmp tmp1, MIN_PAGE_SIZE - 32

View File

@@ -33,7 +33,7 @@ END (_setjmp)
libc_hidden_def (_setjmp) libc_hidden_def (_setjmp)
ENTRY (__sigsetjmp) ENTRY (__sigsetjmp)
DELOUSE (0) PTR_ARG (0)
1: 1:
stp x19, x20, [x0, #JB_X19<<3] stp x19, x20, [x0, #JB_X19<<3]

View File

@@ -56,7 +56,7 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (strchr) ENTRY (strchr)
DELOUSE (0) PTR_ARG (0)
bic src, srcin, 15 bic src, srcin, 15
dup vrepchr.16b, chrin dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src] ld1 {vdata.16b}, [src]

View File

@@ -54,7 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (__strchrnul) ENTRY (__strchrnul)
DELOUSE (0) PTR_ARG (0)
bic src, srcin, 15 bic src, srcin, 15
dup vrepchr.16b, chrin dup vrepchr.16b, chrin
ld1 {vdata.16b}, [src] ld1 {vdata.16b}, [src]

View File

@@ -62,8 +62,8 @@
NUL too in big-endian, byte-reverse the data before the NUL check. */ NUL too in big-endian, byte-reverse the data before the NUL check. */
ENTRY(strcmp) ENTRY(strcmp)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
sub off2, src2, src1 sub off2, src2, src1
mov zeroones, REP8_01 mov zeroones, REP8_01
and tmp, src1, 7 and tmp, src1, 7

View File

@@ -73,8 +73,8 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRCPY) ENTRY (STRCPY)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
bic src, srcin, 15 bic src, srcin, 15
mov wtmp, 0xf00f mov wtmp, 0xf00f
ld1 {vdata.16b}, [src] ld1 {vdata.16b}, [src]

View File

@@ -54,8 +54,7 @@
string, counting trailing zeros identifies exactly which byte matched. */ string, counting trailing zeros identifies exactly which byte matched. */
ENTRY (STRLEN) ENTRY (STRLEN)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1)
bic src, srcin, 15 bic src, srcin, 15
mov wtmp, 0xf00f mov wtmp, 0xf00f
ld1 {vdata.16b}, [src] ld1 {vdata.16b}, [src]

View File

@@ -55,9 +55,8 @@
#define REP8_80 0x8080808080808080 #define REP8_80 0x8080808080808080
ENTRY_ALIGN_AND_PAD (__strnlen, 6, 9) ENTRY_ALIGN_AND_PAD (__strnlen, 6, 9)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) SIZE_ARG (1)
DELOUSE (2)
cbz limit, L(hit_limit) cbz limit, L(hit_limit)
mov zeroones, #REP8_01 mov zeroones, #REP8_01
bic src, srcin, #15 bic src, srcin, #15

View File

@@ -59,7 +59,7 @@
if the relevant byte matched the NUL end of string. */ if the relevant byte matched the NUL end of string. */
ENTRY(strrchr) ENTRY(strrchr)
DELOUSE (0) PTR_ARG (0)
bic src, srcin, 15 bic src, srcin, 15
dup vrepchr.16b, chrin dup vrepchr.16b, chrin
mov wtmp, 0x3003 mov wtmp, 0x3003

View File

@@ -25,12 +25,14 @@
# define AARCH64_R(NAME) R_AARCH64_ ## NAME # define AARCH64_R(NAME) R_AARCH64_ ## NAME
# define PTR_REG(n) x##n # define PTR_REG(n) x##n
# define PTR_LOG_SIZE 3 # define PTR_LOG_SIZE 3
# define DELOUSE(n) # define PTR_ARG(n)
# define SIZE_ARG(n)
#else #else
# define AARCH64_R(NAME) R_AARCH64_P32_ ## NAME # define AARCH64_R(NAME) R_AARCH64_P32_ ## NAME
# define PTR_REG(n) w##n # define PTR_REG(n) w##n
# define PTR_LOG_SIZE 2 # define PTR_LOG_SIZE 2
# define DELOUSE(n) mov w##n, w##n # define PTR_ARG(n) mov w##n, w##n
# define SIZE_ARG(n) mov w##n, w##n
#endif #endif
#define PTR_SIZE (1<<PTR_LOG_SIZE) #define PTR_SIZE (1<<PTR_LOG_SIZE)

View File

@@ -33,13 +33,12 @@
*/ */
.text .text
ENTRY(__clone) ENTRY(__clone)
DELOUSE (0) PTR_ARG (0)
DELOUSE (1) PTR_ARG (1)
DELOUSE (2) PTR_ARG (3)
DELOUSE (3) PTR_ARG (4)
DELOUSE (4) PTR_ARG (5)
DELOUSE (5) PTR_ARG (6)
DELOUSE (6)
/* Save args for the child. */ /* Save args for the child. */
mov x10, x0 mov x10, x0
mov x11, x2 mov x11, x2

View File

@@ -30,7 +30,7 @@
.text .text
ENTRY(__getcontext) ENTRY(__getcontext)
DELOUSE (0) PTR_ARG (0)
/* The saved context will return to the getcontext() call point /* The saved context will return to the getcontext() call point
with a return value of 0 */ with a return value of 0 */
str xzr, [x0, oX0 + 0 * SZREG] str xzr, [x0, oX0 + 0 * SZREG]

View File

@@ -34,7 +34,7 @@
.text .text
ENTRY (__setcontext) ENTRY (__setcontext)
DELOUSE (0) PTR_ARG (0)
/* Save a copy of UCP. */ /* Save a copy of UCP. */
mov x9, x0 mov x9, x0

View File

@@ -27,7 +27,7 @@
.text .text
ENTRY(__swapcontext) ENTRY(__swapcontext)
DELOUSE (0) PTR_ARG (0)
/* Set the value returned when swapcontext() returns in this context. /* Set the value returned when swapcontext() returns in this context.
And set up x1 to become the return address of the caller, so we And set up x1 to become the return address of the caller, so we
can return there with a normal RET instead of an indirect jump. */ can return there with a normal RET instead of an indirect jump. */