--- /home/chris/oh_patchlibc_0803/openharmony/third_party/musl/arch/aarch64/syscall_arch.h	2023-08-07 15:19:23.150403358 +0800
+++ topatch/arch/aarch64/syscall_arch.h	2023-08-07 16:24:49.173781279 +0800
@@ -1,78 +1,31 @@
+/*
+ * Copyright (c) 2023 Institute of Parallel And Distributed Systems (IPADS), Shanghai Jiao Tong University (SJTU)
+ * Licensed under the Mulan PSL v2.
+ * You can use this software according to the terms and conditions of the Mulan PSL v2.
+ * You may obtain a copy of Mulan PSL v2 at:
+ * http://license.coscl.org.cn/MulanPSL2
+ * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
+ * PURPOSE.
+ * See the Mulan PSL v2 for more details.
+ */
+
 #define __SYSCALL_LL_E(x) (x)
 #define __SYSCALL_LL_O(x) (x)
 
-#define __asm_syscall(...) do { \
-	__asm__ __volatile__ ( "svc 0" \
-	: "=r"(x0) : __VA_ARGS__ : "memory", "cc"); \
-	return x0; \
-	} while (0)
-
-static inline long __syscall0(long n)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0");
-	__asm_syscall("r"(x8));
-}
-
-static inline long __syscall1(long n, long a)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	__asm_syscall("r"(x8), "0"(x0));
-}
-
-static inline long __syscall2(long n, long a, long b)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	register long x1 __asm__("x1") = b;
-	__asm_syscall("r"(x8), "0"(x0), "r"(x1));
-}
-
-static inline long __syscall3(long n, long a, long b, long c)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	register long x1 __asm__("x1") = b;
-	register long x2 __asm__("x2") = c;
-	__asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2));
-}
-
-static inline long __syscall4(long n, long a, long b, long c, long d)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	register long x1 __asm__("x1") = b;
-	register long x2 __asm__("x2") = c;
-	register long x3 __asm__("x3") = d;
-	__asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3));
-}
-
-static inline long __syscall5(long n, long a, long b, long c, long d, long e)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	register long x1 __asm__("x1") = b;
-	register long x2 __asm__("x2") = c;
-	register long x3 __asm__("x3") = d;
-	register long x4 __asm__("x4") = e;
-	__asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4));
-}
-
-static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
-{
-	register long x8 __asm__("x8") = n;
-	register long x0 __asm__("x0") = a;
-	register long x1 __asm__("x1") = b;
-	register long x2 __asm__("x2") = c;
-	register long x3 __asm__("x3") = d;
-	register long x4 __asm__("x4") = e;
-	register long x5 __asm__("x5") = f;
-	__asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4), "r"(x5));
-}
+long __syscall0(long n);
+long __syscall1(long n, long a);
+long __syscall2(long n, long a, long b);
+long __syscall3(long n, long a, long b, long c);
+long __syscall4(long n, long a, long b, long c, long d);
+long __syscall5(long n, long a, long b, long c, long d, long e);
+long __syscall6(long n, long a, long b, long c, long d, long e, long f);
 
+/* disable VDSO */
+#if 0
 #define VDSO_USEFUL
 #define VDSO_CGT_SYM "__kernel_clock_gettime"
 #define VDSO_CGT_VER "LINUX_2.6.39"
+#endif
 
 #define IPC_64 0