diff --git a/sysdeps/aarch64/__longjmp.S b/sysdeps/aarch64/__longjmp.S
index 38efddbbae..b79a76b7ce 100644
--- a/sysdeps/aarch64/__longjmp.S
+++ b/sysdeps/aarch64/__longjmp.S
@@ -47,8 +47,6 @@ ENTRY (__longjmp)
 	cfi_offset(d14, JB_D14<<3)
 	cfi_offset(d15, JB_D15<<3)
 
-	PTR_ARG (0)
-
 #if IS_IN(libc)
 	/* Disable ZA state of SME in libc.a and libc.so, but not in ld.so. */
 # if HAVE_AARCH64_PAC_RET
diff --git a/sysdeps/aarch64/__mtag_tag_region.S b/sysdeps/aarch64/__mtag_tag_region.S
index 9b1059675c..fcd8191a76 100644
--- a/sysdeps/aarch64/__mtag_tag_region.S
+++ b/sysdeps/aarch64/__mtag_tag_region.S
@@ -40,9 +40,6 @@
 #define zva_val	x4
 
 ENTRY (__libc_mtag_tag_region)
-	PTR_ARG (0)
-	SIZE_ARG (1)
-
 	add	dstend, dstin, count
 
 	cmp	count, 96
diff --git a/sysdeps/aarch64/__mtag_tag_zero_region.S b/sysdeps/aarch64/__mtag_tag_zero_region.S
index e7d411706e..3b49e19843 100644
--- a/sysdeps/aarch64/__mtag_tag_zero_region.S
+++ b/sysdeps/aarch64/__mtag_tag_zero_region.S
@@ -40,9 +40,6 @@
 #define zva_val	x4
 
 ENTRY (__libc_mtag_tag_zero_region)
-	PTR_ARG (0)
-	SIZE_ARG (1)
-
 	add	dstend, dstin, count
 
 	cmp	count, 96
diff --git a/sysdeps/aarch64/dl-start.S b/sysdeps/aarch64/dl-start.S
index a249fda21c..b26549bd31 100644
--- a/sysdeps/aarch64/dl-start.S
+++ b/sysdeps/aarch64/dl-start.S
@@ -28,7 +28,6 @@ ENTRY (_start)
 
 	/* Load and relocate all library dependencies. */
 	mov	x0, sp
-	PTR_ARG (0)
 	bl	_dl_start
 	/* Returns user entry point in x0. */
 	mov	PTR_REG (21), PTR_REG (0)
diff --git a/sysdeps/aarch64/dl-tlsdesc.S b/sysdeps/aarch64/dl-tlsdesc.S
index 6045b2e0a9..76048950c0 100644
--- a/sysdeps/aarch64/dl-tlsdesc.S
+++ b/sysdeps/aarch64/dl-tlsdesc.S
@@ -75,7 +75,6 @@
 	.align 2
 _dl_tlsdesc_return:
 	BTI_C
-	PTR_ARG (0)
 	ldr	PTR_REG (0), [x0, #PTR_SIZE]
 	RET
 	cfi_endproc
@@ -99,7 +98,6 @@ _dl_tlsdesc_undefweak:
 	BTI_C
 	str	x1, [sp, #-16]!
 	cfi_adjust_cfa_offset (16)
-	PTR_ARG (0)
 	ldr	PTR_REG (0), [x0, #PTR_SIZE]
 	mrs	x1, tpidr_el0
 	sub	PTR_REG (0), PTR_REG (0), PTR_REG (1)
@@ -145,7 +143,6 @@ _dl_tlsdesc_undefweak:
 	.align 2
 _dl_tlsdesc_dynamic:
 	BTI_C
-	PTR_ARG (0)
 
 	/* Save just enough registers to support fast path, if we fall
 	   into slow path we will save additional registers. */
diff --git a/sysdeps/aarch64/memchr.S b/sysdeps/aarch64/memchr.S
index e67c3591c2..76ed5f4549 100644
--- a/sysdeps/aarch64/memchr.S
+++ b/sysdeps/aarch64/memchr.S
@@ -57,8 +57,6 @@
    exactly which byte matched. */
 
 ENTRY (MEMCHR)
-	PTR_ARG (0)
-	SIZE_ARG (2)
 	bic	src, srcin, 15
 	cbz	cntin, L(nomatch)
 	ld1	{vdata.16b}, [src]
diff --git a/sysdeps/aarch64/memcmp.S b/sysdeps/aarch64/memcmp.S
index 471dc823da..a7482f60df 100644
--- a/sysdeps/aarch64/memcmp.S
+++ b/sysdeps/aarch64/memcmp.S
@@ -44,10 +44,6 @@
 
 ENTRY (memcmp)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	cmp	limit, 16
 	b.lo	L(less16)
 	ldp	data1, data3, [src1]
diff --git a/sysdeps/aarch64/memcpy.S b/sysdeps/aarch64/memcpy.S
index 725705c8df..d41e617bf0 100644
--- a/sysdeps/aarch64/memcpy.S
+++ b/sysdeps/aarch64/memcpy.S
@@ -70,10 +70,6 @@
    from the end. */
 
 ENTRY (MEMCPY)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	add	srcend, src, count
 	add	dstend, dstin, count
 	cmp	count, 128
@@ -187,10 +183,6 @@
 libc_hidden_builtin_def (MEMCPY)
 
 ENTRY (MEMMOVE)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	add	srcend, src, count
 	add	dstend, dstin, count
 	cmp	count, 128
diff --git a/sysdeps/aarch64/memrchr.S b/sysdeps/aarch64/memrchr.S
index 229a8a089c..2586c4502d 100644
--- a/sysdeps/aarch64/memrchr.S
+++ b/sysdeps/aarch64/memrchr.S
@@ -55,8 +55,6 @@
    exactly which byte matched.
 */
 ENTRY (__memrchr)
-	PTR_ARG (0)
-	SIZE_ARG (2)
 	add	end, srcin, cntin
 	sub	endm1, end, 1
 	bic	src, endm1, 15
diff --git a/sysdeps/aarch64/memset.S b/sysdeps/aarch64/memset.S
index 9a5a89ebc5..5bd1bc7fda 100644
--- a/sysdeps/aarch64/memset.S
+++ b/sysdeps/aarch64/memset.S
@@ -39,9 +39,6 @@
 #define dstend2	x5
 
 ENTRY (MEMSET)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	dup	v0.16B, valw
 	cmp	count, 16
 	b.lo	L(set_small)
diff --git a/sysdeps/aarch64/multiarch/memchr_nosimd.S b/sysdeps/aarch64/multiarch/memchr_nosimd.S
index 03e2852eb9..86892b2276 100644
--- a/sysdeps/aarch64/multiarch/memchr_nosimd.S
+++ b/sysdeps/aarch64/multiarch/memchr_nosimd.S
@@ -60,9 +60,6 @@
 
 ENTRY (__memchr_nosimd)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	/* Do not dereference srcin if no bytes to compare. */
 	cbz	cntin, L(none_chr)
diff --git a/sysdeps/aarch64/multiarch/memcpy_a64fx.S b/sysdeps/aarch64/multiarch/memcpy_a64fx.S
index 0be269cf8b..ed18682700 100644
--- a/sysdeps/aarch64/multiarch/memcpy_a64fx.S
+++ b/sysdeps/aarch64/multiarch/memcpy_a64fx.S
@@ -96,10 +96,6 @@
 
 ENTRY (__memcpy_a64fx)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	cntb	vlen
 	cmp	n, vlen, lsl 1
 	b.hi	L(copy_small)
@@ -236,10 +232,6 @@
 END (__memcpy_a64fx)
 
 ENTRY_ALIGN (__memmove_a64fx, 4)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	/* Fast case for up to 2 vectors. */
 	cntb	vlen
 	cmp	n, vlen, lsl 1
diff --git a/sysdeps/aarch64/multiarch/memcpy_mops.S b/sysdeps/aarch64/multiarch/memcpy_mops.S
index 85a06332c7..664f7b4a28 100644
--- a/sysdeps/aarch64/multiarch/memcpy_mops.S
+++ b/sysdeps/aarch64/multiarch/memcpy_mops.S
@@ -26,10 +26,6 @@
 */
 
 ENTRY (__memcpy_mops)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	mov	x3, x0
 	.inst	0x19010443	/* cpyfp [x3]!, [x1]!, x2! */
 	.inst	0x19410443	/* cpyfm [x3]!, [x1]!, x2! */
diff --git a/sysdeps/aarch64/multiarch/memcpy_oryon1.S b/sysdeps/aarch64/multiarch/memcpy_oryon1.S
index bda5ed0139..e86d8b04f5 100644
--- a/sysdeps/aarch64/multiarch/memcpy_oryon1.S
+++ b/sysdeps/aarch64/multiarch/memcpy_oryon1.S
@@ -63,10 +63,6 @@
 
 ENTRY (__memmove_oryon1)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	sub	tmp1, dstin, src
 	cmp	count, 96
 	ccmp	tmp1, count, 2, hi
@@ -77,10 +73,6 @@
 END (__memmove_oryon1)
 
 ENTRY (__memcpy_oryon1)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	add	srcend, src, count
 	add	dstend, dstin, count
 	cmp	count, 16
diff --git a/sysdeps/aarch64/multiarch/memcpy_sve.S b/sysdeps/aarch64/multiarch/memcpy_sve.S
index f36248c7d9..26d4890d2c 100644
--- a/sysdeps/aarch64/multiarch/memcpy_sve.S
+++ b/sysdeps/aarch64/multiarch/memcpy_sve.S
@@ -61,10 +61,6 @@
 	.arch armv8.2-a+sve
 
 ENTRY (__memcpy_sve)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	cmp	count, 128
 	b.hi	L(copy_long)
 	cntb	vlen
@@ -144,10 +140,6 @@
 END (__memcpy_sve)
 
 ENTRY (__memmove_sve)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	cmp	count, 128
 	b.hi	L(move_long)
 	cntb	vlen
diff --git a/sysdeps/aarch64/multiarch/memmove_mops.S b/sysdeps/aarch64/multiarch/memmove_mops.S
index 2768096e08..ec650a54cb 100644
--- a/sysdeps/aarch64/multiarch/memmove_mops.S
+++ b/sysdeps/aarch64/multiarch/memmove_mops.S
@@ -26,10 +26,6 @@
 */
 
 ENTRY (__memmove_mops)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	SIZE_ARG (2)
-
 	mov	x3, x0
 	.inst	0x1d010443	/* cpyp [x3]!, [x1]!, x2! */
 	.inst	0x1d410443	/* cpym [x3]!, [x1]!, x2! */
diff --git a/sysdeps/aarch64/multiarch/memset_a64fx.S b/sysdeps/aarch64/multiarch/memset_a64fx.S
index 7f369977a7..ea60b78e69 100644
--- a/sysdeps/aarch64/multiarch/memset_a64fx.S
+++ b/sysdeps/aarch64/multiarch/memset_a64fx.S
@@ -55,8 +55,6 @@
 #define BTI_C
 
 ENTRY (__memset_a64fx)
-	PTR_ARG (0)
-	SIZE_ARG (2)
 
 	cntb	vector_length
 	dup	z0.b, valw
diff --git a/sysdeps/aarch64/multiarch/memset_emag.S b/sysdeps/aarch64/multiarch/memset_emag.S
index d1d9be6211..ef7146404e 100644
--- a/sysdeps/aarch64/multiarch/memset_emag.S
+++ b/sysdeps/aarch64/multiarch/memset_emag.S
@@ -34,9 +34,6 @@
 
 ENTRY (__memset_emag)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	bfi	valw, valw, 8, 8
 	bfi	valw, valw, 16, 16
 	bfi	val, val, 32, 32
diff --git a/sysdeps/aarch64/multiarch/memset_kunpeng.S b/sysdeps/aarch64/multiarch/memset_kunpeng.S
index fb3202b1e4..837b8f10c5 100644
--- a/sysdeps/aarch64/multiarch/memset_kunpeng.S
+++ b/sysdeps/aarch64/multiarch/memset_kunpeng.S
@@ -33,9 +33,6 @@
 
 ENTRY (__memset_kunpeng)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	dup	v0.16B, valw
 	add	dstend, dstin, count
diff --git a/sysdeps/aarch64/multiarch/memset_mops.S b/sysdeps/aarch64/multiarch/memset_mops.S
index 83cf3784a0..283fd4996c 100644
--- a/sysdeps/aarch64/multiarch/memset_mops.S
+++ b/sysdeps/aarch64/multiarch/memset_mops.S
@@ -26,9 +26,6 @@
 */
 
 ENTRY (__memset_mops)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	mov	x3, x0
 	.inst	0x19c10443	/* setp [x3]!, x2!, x1 */
 	.inst	0x19c14443	/* setm [x3]!, x2!, x1 */
diff --git a/sysdeps/aarch64/multiarch/memset_oryon1.S b/sysdeps/aarch64/multiarch/memset_oryon1.S
index 8e032f131c..0f9b718f25 100644
--- a/sysdeps/aarch64/multiarch/memset_oryon1.S
+++ b/sysdeps/aarch64/multiarch/memset_oryon1.S
@@ -33,9 +33,6 @@
 
 ENTRY (__memset_oryon1)
-	PTR_ARG (0)
-	SIZE_ARG (2)
-
 	bfi	valw, valw, 8, 8
 	bfi	valw, valw, 16, 16
 	bfi	val, val, 32, 32
diff --git a/sysdeps/aarch64/multiarch/strlen_asimd.S b/sysdeps/aarch64/multiarch/strlen_asimd.S
index 4eb91c9885..457e7216f3 100644
--- a/sysdeps/aarch64/multiarch/strlen_asimd.S
+++ b/sysdeps/aarch64/multiarch/strlen_asimd.S
@@ -87,7 +87,6 @@
    character, return the length, if not, continue in the main loop. */
 
 ENTRY (__strlen_asimd)
-	PTR_ARG (0)
 	and	tmp1, srcin, MIN_PAGE_SIZE - 1
 	cmp	tmp1, MIN_PAGE_SIZE - 32
 	b.hi	L(page_cross)
diff --git a/sysdeps/aarch64/setjmp.S b/sysdeps/aarch64/setjmp.S
index b630ca099a..73e204c349 100644
--- a/sysdeps/aarch64/setjmp.S
+++ b/sysdeps/aarch64/setjmp.S
@@ -34,8 +34,6 @@ END (_setjmp)
 libc_hidden_def (_setjmp)
 
 ENTRY (__sigsetjmp)
-	PTR_ARG (0)
-
 1:
 	stp	x19, x20, [x0, #JB_X19<<3]
 	stp	x21, x22, [x0, #JB_X21<<3]
diff --git a/sysdeps/aarch64/strchr.S b/sysdeps/aarch64/strchr.S
index 8fb7c33223..878bd97f19 100644
--- a/sysdeps/aarch64/strchr.S
+++ b/sysdeps/aarch64/strchr.S
@@ -52,7 +52,6 @@
    If it is not a multiple of 4, there was no match. */
 
 ENTRY (strchr)
-	PTR_ARG (0)
 	bic	src, srcin, 15
 	dup	vrepchr.16b, chrin
 	ld1	{vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strchrnul.S b/sysdeps/aarch64/strchrnul.S
index 7862cdfd70..39c9909d06 100644
--- a/sysdeps/aarch64/strchrnul.S
+++ b/sysdeps/aarch64/strchrnul.S
@@ -51,7 +51,6 @@
    exactly which byte matched. */
 
 ENTRY (__strchrnul)
-	PTR_ARG (0)
 	bic	src, srcin, 15
 	dup	vrepchr.16b, chrin
 	ld1	{vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strcmp.S b/sysdeps/aarch64/strcmp.S
index d724586d39..d07ab2e6ef 100644
--- a/sysdeps/aarch64/strcmp.S
+++ b/sysdeps/aarch64/strcmp.S
@@ -62,8 +62,6 @@
    NUL too in big-endian, byte-reverse the data before the NUL check.
 */
 ENTRY(strcmp)
-	PTR_ARG (0)
-	PTR_ARG (1)
 	sub	off2, src2, src1
 	mov	zeroones, REP8_01
 	and	tmp, src1, 7
diff --git a/sysdeps/aarch64/strcpy.S b/sysdeps/aarch64/strcpy.S
index 5477597f71..ce4c3e8b5e 100644
--- a/sysdeps/aarch64/strcpy.S
+++ b/sysdeps/aarch64/strcpy.S
@@ -69,8 +69,6 @@
    exactly which byte matched. */
 
 ENTRY (STRCPY)
-	PTR_ARG (0)
-	PTR_ARG (1)
 	bic	src, srcin, 15
 	ld1	{vdata.16b}, [src]
 	cmeq	vhas_nul.16b, vdata.16b, 0
diff --git a/sysdeps/aarch64/strlen.S b/sysdeps/aarch64/strlen.S
index a7df05676c..1874a55d91 100644
--- a/sysdeps/aarch64/strlen.S
+++ b/sysdeps/aarch64/strlen.S
@@ -49,7 +49,6 @@
    identifies the first zero byte. */
 
 ENTRY (STRLEN)
-	PTR_ARG (0)
 	bic	src, srcin, 15
 	ld1	{vdata.16b}, [src]
 	cmeq	vhas_nul.16b, vdata.16b, 0
diff --git a/sysdeps/aarch64/strnlen.S b/sysdeps/aarch64/strnlen.S
index 9c406395bc..e60dbe0e25 100644
--- a/sysdeps/aarch64/strnlen.S
+++ b/sysdeps/aarch64/strnlen.S
@@ -49,8 +49,6 @@
    identifies the first zero byte. */
 
 ENTRY (__strnlen)
-	PTR_ARG (0)
-	SIZE_ARG (1)
 	bic	src, srcin, 15
 	cbz	cntin, L(nomatch)
 	ld1	{vdata.16b}, [src]
diff --git a/sysdeps/aarch64/strrchr.S b/sysdeps/aarch64/strrchr.S
index 869b1cd790..8f4e7ee4f6 100644
--- a/sysdeps/aarch64/strrchr.S
+++ b/sysdeps/aarch64/strrchr.S
@@ -55,7 +55,6 @@
    if the relevant byte matched the NUL end of string. */
 
 ENTRY (strrchr)
-	PTR_ARG (0)
 	bic	src, srcin, 15
 	dup	vrepchr.16b, chrin
 	movi	vrepmask.16b, 0x33
diff --git a/sysdeps/unix/sysv/linux/aarch64/clone.S b/sysdeps/unix/sysv/linux/aarch64/clone.S
index 97e1afa57f..40015c6933 100644
--- a/sysdeps/unix/sysv/linux/aarch64/clone.S
+++ b/sysdeps/unix/sysv/linux/aarch64/clone.S
@@ -33,12 +33,6 @@
 */
 	.text
 ENTRY(__clone)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	PTR_ARG (3)
-	PTR_ARG (4)
-	PTR_ARG (5)
-	PTR_ARG (6)
 	/* Save args for the child. */
 	mov	x10, x0
 	mov	x11, x2
diff --git a/sysdeps/unix/sysv/linux/aarch64/clone3.S b/sysdeps/unix/sysv/linux/aarch64/clone3.S
index 443e117bf9..c9ca845ef2 100644
--- a/sysdeps/unix/sysv/linux/aarch64/clone3.S
+++ b/sysdeps/unix/sysv/linux/aarch64/clone3.S
@@ -36,10 +36,6 @@
 	.text
 
 ENTRY(__clone3)
-	PTR_ARG (0)
-	PTR_ARG (1)
-	PTR_ARG (3)
-	PTR_ARG (4)
 	/* Save args for the child. */
 	mov	x10, x0	/* cl_args */
 	mov	x11, x2	/* func */
diff --git a/sysdeps/unix/sysv/linux/aarch64/getcontext.S b/sysdeps/unix/sysv/linux/aarch64/getcontext.S
index d9dd066051..6e7fc241f5 100644
--- a/sysdeps/unix/sysv/linux/aarch64/getcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/getcontext.S
@@ -30,7 +30,6 @@
 	.text
 
 ENTRY(__getcontext)
-	PTR_ARG (0)
 	/* The saved context will return to the getcontext() call point
 	   with a return value of 0 */
 	str	xzr, [x0, oX0 + 0 * SZREG]
diff --git a/sysdeps/unix/sysv/linux/aarch64/setcontext.S b/sysdeps/unix/sysv/linux/aarch64/setcontext.S
index 695fc5b9b5..022a263c47 100644
--- a/sysdeps/unix/sysv/linux/aarch64/setcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/setcontext.S
@@ -34,7 +34,6 @@
 	.text
 
 ENTRY (__setcontext)
-	PTR_ARG (0)
 	/* Save a copy of UCP. */
 	mov	x9, x0
diff --git a/sysdeps/unix/sysv/linux/aarch64/swapcontext.S b/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
index 893a902866..cc41253a13 100644
--- a/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
+++ b/sysdeps/unix/sysv/linux/aarch64/swapcontext.S
@@ -27,7 +27,6 @@
 	.text
 
 ENTRY(__swapcontext)
-	PTR_ARG (0)
 	/* Set the value returned when swapcontext() returns in this context.
 	   And set up x1 to become the return address of the caller, so we
 	   can return there with a normal RET instead of an indirect jump. */
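
Note, not part of the patch: every hunk above only drops PTR_ARG/SIZE_ARG lines (and, in a few spots, an adjacent blank line), so no generated LP64 code changes. As a hedged sketch of what is being removed -- assuming the usual definitions in sysdeps/aarch64/sysdep.h, written from memory rather than quoted from this patch -- the macros look roughly like this:

/* Sketch only; assumed definitions, not part of this diff.
   Under the ILP32 configuration, pointer and size arguments arrive in
   32-bit W registers and must be zero-extended before being used as
   64-bit X registers; under LP64 both macros expand to nothing, which
   is why deleting the call sites is a no-op for the LP64 build.  */
#ifdef __ILP32__
# define PTR_ARG(n)	mov	w##n, w##n	/* zero-extend pointer argument n */
# define SIZE_ARG(n)	mov	w##n, w##n	/* zero-extend size argument n */
#else
# define PTR_ARG(n)
# define SIZE_ARG(n)
#endif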