1/****************************************************************************** 2 * Copyright © 2018, VideoLAN and dav1d authors 3 * Copyright © 2015 Martin Storsjo 4 * Copyright © 2015 Janne Grunau 5 * All rights reserved. 6 * 7 * Redistribution and use in source and binary forms, with or without 8 * modification, are permitted provided that the following conditions are met: 9 * 10 * 1. Redistributions of source code must retain the above copyright notice, this 11 * list of conditions and the following disclaimer. 12 * 13 * 2. Redistributions in binary form must reproduce the above copyright notice, 14 * this list of conditions and the following disclaimer in the documentation 15 * and/or other materials provided with the distribution. 16 * 17 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 18 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 19 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 21 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 22 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 23 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 24 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 25 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 26 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
 *****************************************************************************/

@ checkasm register-clobber checker for 32-bit ARM (AAPCS).
@ Defines checked_call_vfp: a trampoline that seeds all callee-saved
@ registers with known values, calls the function under test, and reports
@ (via checkasm_fail_func) any callee-saved register, FPSCR state or stack
@ memory that the callee failed to preserve.

#define PRIVATE_PREFIX checkasm_

#include "src/arm/asm.S"
#include "src/arm/32/util.S"

@ Reference bit patterns loaded into the callee-saved registers
@ (r4-r11 and, in the vfp variant, d8-d15) before the call and compared
@ against them afterwards. align=3 gives 8-byte alignment so the values
@ can be read with ldrd/vldm.
const register_init, align=3
        .quad 0x21f86d66c8ca00ce
        .quad 0x75b6ba21077c48ad
        .quad 0xed56bb2dcb3c7736
        .quad 0x8bda43d3fd1a7e06
        .quad 0xb64a9c9e5d318408
        .quad 0xdf9a54b303f1d3a3
        .quad 0x4a75479abd64e097
        .quad 0x249214109d5d1c88
endconst

@ printf-style format strings passed (in r0) to checkasm_fail_func;
@ r1 carries the offending register number / changed-bits mask.
const error_message_fpscr
        .asciz "failed to preserve register FPSCR, changed bits: %x"
error_message_gpr:
        .asciz "failed to preserve register r%d"
error_message_vfp:
        .asciz "failed to preserve register d%d"
error_message_stack:
        .asciz "failed to preserve stack"
endconst

@ max number of args used by any asm function.
#define MAX_ARGS 15

@ Stack space for the arguments beyond the four passed in r0-r3
@ (4 bytes per word argument).
#define ARG_STACK 4*(MAX_ARGS - 4)

@ Align the used stack space to 8 to preserve the stack alignment.
@ +8 for stack canary reference.
#define ARG_STACK_A (((ARG_STACK + pushed + 7) & ~7) - pushed + 8)

@ Emits: void checked_call_\variant(func, ...).
@ r0 = function under test; r2/r3 become the callee's r0/r1; the callee's
@ remaining arguments follow on the caller's stack. "pushed" tracks how
@ many bytes this trampoline has pushed, so incoming stack arguments can
@ be addressed relative to sp.
.macro clobbercheck variant
.equ pushed, 4*9
function checked_call_\variant, export=1
        @ Save our own callee-saved state and return address.
        push            {r4-r11, lr}
.ifc \variant, vfp
        @ Also save the callee-saved VFP registers and FPSCR so they can
        @ be restored (and FPSCR compared) after the call.
        vpush           {d8-d15}
        fmrx            r4,  FPSCR
        push            {r4}
.equ pushed, pushed + 16*4 + 4
.endif

        @ Seed all callee-saved registers with the reference values.
        movrel          r12, register_init
.ifc \variant, vfp
        vldm            r12, {d8-d15}
.endif
        ldm             r12, {r4-r11}

        @ Carve out a fresh argument area and copy the callee's stack
        @ arguments into it, one word at a time.
        sub             sp,  sp,  #ARG_STACK_A
.equ pos, 0
.rept MAX_ARGS-4
        ldr             r12, [sp, #ARG_STACK_A + pushed + 8 + pos]
        str             r12, [sp, #pos]
.equ pos, pos + 4
.endr

        @ For stack overflows, the callee is free to overwrite the parameters
        @ that were passed on the stack (if any), so we can only check after
        @ that point. First figure out how many parameters the function
        @ really took on the stack:
        ldr             r12, [sp, #ARG_STACK_A + pushed + 8 + 4*(MAX_ARGS-4)]
        @ Load the first non-parameter value from the stack, that should be
        @ left untouched by the function. Store a copy of it inverted, so that
        @ e.g. overwriting everything with zero would be noticed.
        ldr             r12, [sp, r12, lsl #2]
        mvn             r12, r12
        str             r12, [sp, #ARG_STACK_A - 4]

        @ Shift the trampoline's arguments into the callee's convention:
        @ our r2/r3 become the callee's r0/r1, the next pair comes from
        @ the stack.
        mov             r12, r0
        mov             r0,  r2
        mov             r1,  r3
        ldrd            r2,  r3,  [sp, #ARG_STACK_A + pushed]
        @ Call the target function
        blx             r12

        @ Load the number of stack parameters, stack canary and its reference
        ldr             r12, [sp, #ARG_STACK_A + pushed + 8 + 4*(MAX_ARGS-4)]
        ldr             r2,  [sp, r12, lsl #2]
        ldr             r3,  [sp, #ARG_STACK_A - 4]

        add             sp,  sp,  #ARG_STACK_A
        @ Preserve the callee's (possibly 64-bit) return value across the
        @ checks below.
        push            {r0, r1}

        @ The canary was stored inverted; un-invert the reference and
        @ compare with what is on the stack now.
        mvn             r3,  r3
        cmp             r2,  r3
        bne             5f

        movrel          r12, register_init
.ifc \variant, vfp
        @ Compare one callee-saved d-register against its 8-byte reference
        @ value at register_init[\offset]; branch to the VFP error path on
        @ mismatch.
.macro check_reg_vfp, dreg, offset
        ldrd            r2,  r3,  [r12, #8 * (\offset)]
        vmov            r0,  lr,  \dreg
        eor             r2,  r2,  r0
        eor             r3,  r3,  lr
        orrs            r2,  r2,  r3
        bne             4f
.endm

.irp n, 8, 9, 10, 11, 12, 13, 14, 15
        @ keep track of the checked double/SIMD register
        mov             r1,  #\n
        check_reg_vfp   d\n, \n-8
.endr
.purgem check_reg_vfp

        @ Compare FPSCR against the pre-call copy saved on the stack.
        fmrx            r1,  FPSCR
        ldr             r3,  [sp, #8]
        eor             r1,  r1,  r3
        @ Ignore changes in bits 0-4 and 7
        @ (the cumulative exception status flags, which callees may set)
        bic             r1,  r1,  #0x9f
        @ Ignore changes in the topmost 5 bits
        @ (the N/Z/C/V condition flags and QC)
        bics            r1,  r1,  #0xf8000000
        bne             3f
.endif

        @ keep track of the checked GPR
        mov             r1,  #4
        @ Compare one or two callee-saved GPRs against the next reference
        @ pair at r12 (post-incremented); r1 counts which register is being
        @ checked so the error message can name it.
.macro check_reg reg1, reg2=
        ldrd            r2,  r3,  [r12], #8
        eors            r2,  r2,  \reg1
        bne             2f
        add             r1,  r1,  #1
.ifnb \reg2
        eors            r3,  r3,  \reg2
        bne             2f
.endif
        add             r1,  r1,  #1
.endm
        check_reg       r4,  r5
        check_reg       r6,  r7
@ r9 is a volatile register in the ios ABI
#ifdef __APPLE__
        check_reg       r8
#else
        check_reg       r8,  r9
#endif
        check_reg       r10, r11
.purgem check_reg

        b               0f
@ Error paths: load the matching format string, then report the failure.
5:
        movrel          r0, error_message_stack
        b               1f
4:
        movrel          r0, error_message_vfp
        b               1f
3:
        movrel          r0, error_message_fpscr
        b               1f
2:
        movrel          r0, error_message_gpr
1:
#ifdef PREFIX
        bl              _checkasm_fail_func
#else
        bl              checkasm_fail_func
#endif
0:
        @ Restore the callee's return value and our saved state.
        pop             {r0, r1}
.ifc \variant, vfp
        pop             {r2}
        fmxr            FPSCR, r2
        vpop            {d8-d15}
.endif
        pop             {r4-r11, pc}
endfunc
.endm

clobbercheck vfp