/*
 * Copyright (c) 2022 Huawei Device Co., Ltd.
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
15
/*
 * On AArch64, 64-bit syscall arguments fit in a single register, so the
 * long-long "even" and "odd" argument helpers are plain identity macros.
 */
#define __SYSCALL_LL_E(x) (x)
#define __SYSCALL_LL_O(x) (x)

/*
 * Issue the syscall via "svc 0". The syscall number is bound to x8 and the
 * arguments to x0..x5 by the callers below; __VA_ARGS__ supplies the input
 * constraints. The kernel returns its result in x0, hence the "=r"(x0)
 * output. "memory" and "cc" are clobbered because the kernel may read or
 * write user memory and alter the condition flags.
 */
#define __asm_syscall(...) do { \
    __asm__ __volatile__ ( "svc 0" \
    : "=r"(x0) : __VA_ARGS__ : "memory", "cc"); \
    return x0; \
} while (0)
24
__syscall0(long n)25 static inline long __syscall0(long n)
26 {
27 register long x8 __asm__("x8") = n;
28 register long x0 __asm__("x0");
29 __asm_syscall("r"(x8));
30 }
31
__syscall1(long n,long a)32 static inline long __syscall1(long n, long a)
33 {
34 register long x8 __asm__("x8") = n;
35 register long x0 __asm__("x0") = a;
36 __asm_syscall("r"(x8), "0"(x0));
37 }
38
__syscall2(long n,long a,long b)39 static inline long __syscall2(long n, long a, long b)
40 {
41 register long x8 __asm__("x8") = n;
42 register long x0 __asm__("x0") = a;
43 register long x1 __asm__("x1") = b;
44 __asm_syscall("r"(x8), "0"(x0), "r"(x1));
45 }
46
__syscall3(long n,long a,long b,long c)47 static inline long __syscall3(long n, long a, long b, long c)
48 {
49 register long x8 __asm__("x8") = n;
50 register long x0 __asm__("x0") = a;
51 register long x1 __asm__("x1") = b;
52 register long x2 __asm__("x2") = c;
53 __asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2));
54 }
55
__syscall4(long n,long a,long b,long c,long d)56 static inline long __syscall4(long n, long a, long b, long c, long d)
57 {
58 register long x8 __asm__("x8") = n;
59 register long x0 __asm__("x0") = a;
60 register long x1 __asm__("x1") = b;
61 register long x2 __asm__("x2") = c;
62 register long x3 __asm__("x3") = d;
63 __asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3));
64 }
65
__syscall5(long n,long a,long b,long c,long d,long e)66 static inline long __syscall5(long n, long a, long b, long c, long d, long e)
67 {
68 register long x8 __asm__("x8") = n;
69 register long x0 __asm__("x0") = a;
70 register long x1 __asm__("x1") = b;
71 register long x2 __asm__("x2") = c;
72 register long x3 __asm__("x3") = d;
73 register long x4 __asm__("x4") = e;
74 __asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4));
75 }
76
__syscall6(long n,long a,long b,long c,long d,long e,long f)77 static inline long __syscall6(long n, long a, long b, long c, long d, long e, long f)
78 {
79 register long x8 __asm__("x8") = n;
80 register long x0 __asm__("x0") = a;
81 register long x1 __asm__("x1") = b;
82 register long x2 __asm__("x2") = c;
83 register long x3 __asm__("x3") = d;
84 register long x4 __asm__("x4") = e;
85 register long x5 __asm__("x5") = f;
86 __asm_syscall("r"(x8), "0"(x0), "r"(x1), "r"(x2), "r"(x3), "r"(x4), "r"(x5));
87 }
88
/*
 * vDSO configuration: this architecture exposes a usable vDSO, and these are
 * the symbol names and version tags to look up in it for the fast-path
 * clock_gettime / clock_getres / gettimeofday implementations.
 */
#define VDSO_USEFUL
#define VDSO_CGT_SYM "__kernel_clock_gettime"
#define VDSO_CGT_VER "LINUX_2.6.39"
#define VDSO_CGR_SYM "__kernel_clock_getres"
#define VDSO_CGR_VER "LINUX_2.6.39"
#define VDSO_GTD_SYM "__kernel_gettimeofday"
#define VDSO_GTD_VER "LINUX_2.6.39"

/* No IPC_64 flag is needed in sysvipc syscalls on this architecture. */
#define IPC_64 0
98