/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
/*
 * ARM specific definitions for NOLIBC
 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
 */

#ifndef _NOLIBC_ARCH_ARM_H
#define _NOLIBC_ARCH_ARM_H

/* O_* macros for fcntl/open are architecture-specific */
#define O_RDONLY            0
#define O_WRONLY            1
#define O_RDWR              2
#define O_CREAT          0x40
#define O_EXCL           0x80
#define O_NOCTTY        0x100
#define O_TRUNC         0x200
#define O_APPEND        0x400
#define O_NONBLOCK      0x800
#define O_DIRECTORY    0x4000

/* The struct returned by the stat() syscall, 32-bit only, the syscall returns
 * exactly 56 bytes (stops before the unused array). In big endian, the format
 * differs as devices are returned as short only.
 */
struct sys_stat_struct {
#if defined(__ARMEB__)
	unsigned short st_dev;
	unsigned short __pad1;
#else
	unsigned long  st_dev;
#endif
	unsigned long  st_ino;
	unsigned short st_mode;
	unsigned short st_nlink;
	unsigned short st_uid;
	unsigned short st_gid;

#if defined(__ARMEB__)
	unsigned short st_rdev;
	unsigned short __pad2;
#else
	unsigned long  st_rdev;
#endif
	unsigned long  st_size;
	unsigned long  st_blksize;
	unsigned long  st_blocks;

	unsigned long  st_atime;
	unsigned long  st_atime_nsec;
	unsigned long  st_mtime;
	unsigned long  st_mtime_nsec;

	unsigned long  st_ctime;
	unsigned long  st_ctime_nsec;
	unsigned long  __unused[2];
};
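
/* Illustration only, not part of the original header: a hedged compile-time
 * check of the layout described above, where the kernel fills exactly 56
 * bytes, i.e. everything up to but not including __unused[]. The opt-in
 * guard NOLIBC_ARM_CHECK_STAT_LAYOUT is hypothetical, and the check assumes
 * a compiler providing _Static_assert and __builtin_offsetof (gcc/clang).
 */
#if defined(NOLIBC_ARM_CHECK_STAT_LAYOUT)
_Static_assert(__builtin_offsetof(struct sys_stat_struct, __unused) == 56,
	       "stat() fills exactly 56 bytes, stopping before __unused[]");
#endif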

/* Syscalls for ARM in ARM or Thumb modes:
 *   - registers are 32-bit
 *   - stack is 8-byte aligned
 *     ( http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html)
 *   - syscall number is passed in r7
 *   - arguments are in r0, r1, r2, r3, r4, r5
 *   - the system call is performed by calling svc #0
 *   - syscall return comes in r0.
 *   - only lr is clobbered.
 *   - the arguments are cast to long and assigned into the target registers
 *     which are then simply passed as registers to the asm code, so that we
 *     don't run into issues with register constraints.
 *   - the syscall number is always specified last in order to allow forcing
 *     some registers before it (gcc refuses a %-register at the last position).
 *
 * Also, ARM supports the old_select syscall if newselect is not available
 */
#define __ARCH_WANT_SYS_OLD_SELECT

#define my_syscall0(num) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0"); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall1(num, arg1) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0") = (long)(arg1); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_arg1), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall2(num, arg1, arg2) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0") = (long)(arg1); \
	register long _arg2 asm("r1") = (long)(arg2); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_arg1), "r"(_arg2), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall3(num, arg1, arg2, arg3) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0") = (long)(arg1); \
	register long _arg2 asm("r1") = (long)(arg2); \
	register long _arg3 asm("r2") = (long)(arg3); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall4(num, arg1, arg2, arg3, arg4) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0") = (long)(arg1); \
	register long _arg2 asm("r1") = (long)(arg2); \
	register long _arg3 asm("r2") = (long)(arg3); \
	register long _arg4 asm("r3") = (long)(arg4); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})

#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num  asm("r7") = (num); \
	register long _arg1 asm("r0") = (long)(arg1); \
	register long _arg2 asm("r1") = (long)(arg2); \
	register long _arg3 asm("r2") = (long)(arg3); \
	register long _arg4 asm("r3") = (long)(arg4); \
	register long _arg5 asm("r4") = (long)(arg5); \
	\
	asm volatile ( \
		"svc #0\n" \
		: "=r"(_arg1) \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num) \
		: "memory", "cc", "lr" \
	); \
	_arg1; \
})
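
/* Illustration only, not part of the original header: how a wrapper is
 * typically built on top of the macros above. It assumes __NR_write has been
 * provided by <asm/unistd.h>; nolibc's real wrappers live in the common
 * code, not in this arch file. On failure the kernel returns a negative
 * errno value in r0, which is passed through unchanged here. The opt-in
 * guard NOLIBC_ARM_SYSCALL_EXAMPLE is hypothetical.
 */
#if defined(NOLIBC_ARM_SYSCALL_EXAMPLE) && defined(__NR_write)
static long example_sys_write(int fd, const void *buf, unsigned long count)
{
	/* write(2): number in r7, fd/buf/count in r0/r1/r2, result in r0 */
	return my_syscall3(__NR_write, fd, buf, count);
}
#endif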

/* startup code */
asm(".section .text\n"
    ".weak _start\n"
    "_start:\n"
#if defined(__THUMBEB__) || defined(__THUMBEL__)
    /* We enter here in 32-bit mode but if some previous functions were in
     * 16-bit mode, the assembler cannot know, so we need to tell it we're in
     * 32-bit now, then switch to 16-bit (is there a better way to do it than
     * adding 1 by hand?) and tell the asm we're now in 16-bit mode so that
     * it generates correct instructions. Note that we do not support thumb1.
     */
    ".code 32\n"
    "add r0, pc, #1\n"
    "bx r0\n"
    ".code 16\n"
#endif
    "pop {%r0}\n"                 // argc was in the stack
    "mov %r1, %sp\n"              // argv = sp
    "add %r2, %r1, %r0, lsl #2\n" // envp = argv + 4*argc ...
    "add %r2, %r2, $4\n"          //        ... + 4
    "and %r3, %r1, $-8\n"         // AAPCS: sp must be 8-byte aligned in the
    "mov %sp, %r3\n"              //        callee, and bl doesn't push (lr=pc)
    "bl main\n"                   // main() returns the status code, we'll exit with it.
    "movs r7, $1\n"               // NR_exit == 1
    "svc $0x00\n"
    "");

#endif // _NOLIBC_ARCH_ARM_H
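
/* Illustration only, not part of the original header: with the _start stub
 * above, r0/r1/r2 hold argc/argv/envp when "bl main" is executed, so under
 * the AAPCS a nolibc program may declare main() as, e.g.:
 *
 *	int main(int argc, char **argv, char **envp)
 *	{
 *		return argc > 1 ? 0 : 1;
 *	}
 *
 * The return value comes back in r0 and _start then exits with it through
 * NR_exit. Taking envp as a third parameter is optional; it is simply what
 * the stub leaves in r2.
 */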