/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_
#define ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_

#include "asm_support_x86.h"
#include "interpreter/cfi_asm_support.h"

// Regular gas(1) & current clang/llvm assembler support named macro parameters.
#define MACRO0(macro_name) .macro macro_name
#define MACRO1(macro_name, macro_arg1) .macro macro_name macro_arg1
#define MACRO2(macro_name, macro_arg1, macro_arg2) .macro macro_name macro_arg1, macro_arg2
#define MACRO3(macro_name, macro_arg1, macro_arg2, macro_arg3) .macro macro_name macro_arg1, macro_arg2, macro_arg3
#define MACRO4(macro_name, macro_arg1, macro_arg2, macro_arg3, macro_arg4) .macro macro_name macro_arg1, macro_arg2, macro_arg3, macro_arg4
#define MACRO5(macro_name, macro_arg1, macro_arg2, macro_arg3, macro_arg4, macro_arg5) .macro macro_name macro_arg1, macro_arg2, macro_arg3, macro_arg4, macro_arg5
#define END_MACRO .endm

#if defined(__clang__)
    // Clang/llvm does not support .altmacro. However, the clang/llvm preprocessor doesn't
    // separate the backslash and parameter by a space. Everything just works.
    #define RAW_VAR(name) \name
    #define VAR(name) \name
    #define CALLVAR(name) SYMBOL(\name)
    #define PLT_VAR(name) \name@PLT
    #define REG_VAR(name) %\name
    #define CALL_MACRO(name) \name
#else
    // Regular gas(1) uses \argument_name for macro arguments.
    // We need to turn on alternate macro syntax so we can use & instead, or the preprocessor
    // will screw us by inserting a space between the \ and the name. Even in this mode there's
    // no special meaning to $, so literals are still just $x. The use of altmacro means % is a
    // special character, so care needs to be taken when passing registers as macro arguments.
    .altmacro
    #define RAW_VAR(name) name&
    #define VAR(name) name&
    #define CALLVAR(name) SYMBOL(name&)
    #define PLT_VAR(name) name&@PLT
    #define REG_VAR(name) %name
    #define CALL_MACRO(name) name&
#endif
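
// For illustration: with either assembler the helpers above splice a macro argument straight
// into the surrounding text. For example, the PUSH macro defined later in this file writes
// `pushl REG_VAR(reg)`, so an invocation such as `PUSH edi` assembles to `pushl %edi` whether
// the argument is substituted through clang's `\name` form or gas's altmacro `name&` form.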

#define LITERAL(value) $value
#if defined(__APPLE__)
    #define MACRO_LITERAL(value) $(value)
#else
    #define MACRO_LITERAL(value) $value
#endif

#if defined(__APPLE__)
    #define FUNCTION_TYPE(name)
    #define SIZE(name)
#else
    #define FUNCTION_TYPE(name) .type name, @function
    #define SIZE(name) .size name, .-name
#endif

    // CFI support.
#if !defined(__APPLE__)
    #define CFI_STARTPROC .cfi_startproc
    #define CFI_ENDPROC .cfi_endproc
    #define CFI_ADJUST_CFA_OFFSET(size) .cfi_adjust_cfa_offset size
    #define CFI_DEF_CFA(reg,size) .cfi_def_cfa reg,size
    #define CFI_DEF_CFA_REGISTER(reg) .cfi_def_cfa_register reg
    #define CFI_RESTORE(reg) .cfi_restore reg
    #define CFI_REL_OFFSET(reg,size) .cfi_rel_offset reg,size
    #define CFI_REGISTER(orig_reg, current_reg) .cfi_register orig_reg, current_reg
    #define CFI_REMEMBER_STATE .cfi_remember_state
    // The spec is not clear whether the CFA is part of the saved state and tools
    // differ in the behaviour, so explicitly set the CFA to avoid any ambiguity.
    // The restored CFA state should match the CFA state during CFI_REMEMBER_STATE.
    // `objdump -Wf libart.so | egrep "_cfa|_state"` is useful to audit the opcodes.
    MACRO2(CFI_RESTORE_STATE_AND_DEF_CFA, reg, off)
        .cfi_restore_state
        .cfi_def_cfa \reg,\off
    END_MACRO
    #define CFI_ESCAPE(...) .cfi_escape __VA_ARGS__
    #define CFI_RESTORE_STATE .cfi_restore_state
#else
    // Mac OS doesn't like cfi_* directives.
    #define CFI_STARTPROC
    #define CFI_ENDPROC
    #define CFI_ADJUST_CFA_OFFSET(size)
    #define CFI_DEF_CFA(reg,size)
    #define CFI_DEF_CFA_REGISTER(reg)
    #define CFI_RESTORE(reg)
    #define CFI_REL_OFFSET(reg,size)
    #define CFI_REGISTER(orig_reg, current_reg)
    #define CFI_REMEMBER_STATE
    MACRO2(CFI_RESTORE_STATE_AND_DEF_CFA, reg, off)
    END_MACRO
    #define CFI_ESCAPE(...)
    #define CFI_RESTORE_STATE
#endif

#define CFI_REG_eax 0
#define CFI_REG_ecx 1
#define CFI_REG_edx 2
#define CFI_REG_ebx 3
#define CFI_REG_esp 4
#define CFI_REG_ebp 5
#define CFI_REG_esi 6
#define CFI_REG_edi 7
#define CFI_REG_eip 8

#define CFI_REG(reg) CFI_REG_##reg

MACRO3(CFI_EXPRESSION_BREG, n, b, offset)
    .if (-0x40 <= (\offset)) && ((\offset) < 0x40)
        CFI_EXPRESSION_BREG_1(\n, \b, \offset)
    .elseif (-0x2000 <= (\offset)) && ((\offset) < 0x2000)
        CFI_EXPRESSION_BREG_2(\n, \b, \offset)
    .else
        .error "Unsupported offset"
    .endif
END_MACRO

MACRO3(CFI_DEF_CFA_BREG_PLUS_UCONST, reg, offset, size)
    .if ((\size) < 0)
        .error "Size should be positive"
    .endif
    .if (((\offset) < -0x40) || ((\offset) >= 0x40))
        .error "Unsupported offset"
    .endif
    .if ((\size) < 0x80)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_1(\reg, \offset, \size)
    .elseif ((\size) < 0x4000)
        CFI_DEF_CFA_BREG_PLUS_UCONST_1_2(\reg, \offset, \size)
    .else
        .error "Unsupported size"
    .endif
END_MACRO
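
// Note: the CFI_EXPRESSION_BREG_{1,2} and CFI_DEF_CFA_BREG_PLUS_UCONST_1_{1,2} helpers used
// above come from the included "interpreter/cfi_asm_support.h" and differ only in how many
// bytes their LEB128-encoded operands take; the range checks mirror that (one SLEB128 byte
// covers -0x40..0x3F, two bytes cover -0x2000..0x1FFF, and one ULEB128 byte covers 0..0x7F).
// As a purely illustrative use, `CFI_EXPRESSION_BREG CFI_REG(esi), CFI_REG(esp), 4` would
// record that the caller's esi is saved in memory at [esp + 4].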

    // Symbols. On a Mac, we need a leading underscore.
#if !defined(__APPLE__)
    #define SYMBOL(name) name
    #define PLT_SYMBOL(name) name ## @PLT
#else
    // Mac OS' symbols have an _ prefix.
    #define SYMBOL(name) _ ## name
    #define PLT_SYMBOL(name) _ ## name
#endif

// Directive to hide a function symbol.
#if defined(__APPLE__)
    #define ASM_HIDDEN .private_extern
#else
    #define ASM_HIDDEN .hidden
#endif

    /* Cache alignment for function entry */
MACRO0(ALIGN_FUNCTION_ENTRY)
    // ART-compiled functions have OatQuickMethodHeader but assembly functions do not.
    // Prefix the assembly code with 0xFFs, which means there is no method header.
    .byte 0xFF, 0xFF, 0xFF, 0xFF
    // Cache alignment for function entry.
    // Use 0xFF as the last 4 bytes of alignment stand for OatQuickMethodHeader.
    .balign 16, 0xFF
END_MACRO

MACRO2(DEFINE_FUNCTION_CUSTOM_CFA, c_name, cfa_offset)
    FUNCTION_TYPE(SYMBOL(\c_name))
    ASM_HIDDEN CALLVAR(c_name)
    .globl CALLVAR(c_name)
    ALIGN_FUNCTION_ENTRY
CALLVAR(c_name):
    CFI_STARTPROC
    // Ensure we get an appropriate starting CFA.
    CFI_DEF_CFA(esp, RAW_VAR(cfa_offset))
END_MACRO

MACRO1(DEFINE_FUNCTION, c_name)
    DEFINE_FUNCTION_CUSTOM_CFA RAW_VAR(c_name), __SIZEOF_POINTER__
END_MACRO

MACRO1(END_FUNCTION, c_name)
    CFI_ENDPROC
    SIZE(SYMBOL(\c_name))
END_MACRO

MACRO1(PUSH, reg)
    pushl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(4)
    CFI_REL_OFFSET(REG_VAR(reg), 0)
END_MACRO

MACRO1(POP, reg)
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
    CFI_RESTORE(REG_VAR(reg))
END_MACRO

// Arguments do not need .cfi_rel_offset as they are caller-saved and
// therefore cannot hold caller's variables or unwinding data.
MACRO1(PUSH_ARG, reg)
    pushl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(4)
END_MACRO

MACRO1(POP_ARG, reg)
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
END_MACRO

MACRO1(CFI_RESTORE_REG, reg)
    CFI_RESTORE(REG_VAR(reg))
END_MACRO

MACRO1(INCREASE_FRAME, frame_adjustment)
    subl MACRO_LITERAL(RAW_VAR(frame_adjustment)), %esp
    CFI_ADJUST_CFA_OFFSET((RAW_VAR(frame_adjustment)))
END_MACRO

MACRO1(DECREASE_FRAME, frame_adjustment)
    addl MACRO_LITERAL(RAW_VAR(frame_adjustment)), %esp
    CFI_ADJUST_CFA_OFFSET(-(RAW_VAR(frame_adjustment)))
END_MACRO

#define UNREACHABLE int3

MACRO1(UNIMPLEMENTED, name)
    FUNCTION_TYPE(\name)
    .globl VAR(name)
    ALIGN_FUNCTION_ENTRY
VAR(name):
    CFI_STARTPROC
    UNREACHABLE
    UNREACHABLE
    CFI_ENDPROC
    SIZE(\name)
END_MACRO

MACRO3(SETUP_PC_REL_BASE_IMPL, reg, label, call_label)
    call RAW_VAR(call_label)
    CFI_ADJUST_CFA_OFFSET(4)
RAW_VAR(label):
    popl REG_VAR(reg)
    CFI_ADJUST_CFA_OFFSET(-4)
END_MACRO

MACRO1(SETUP_PC_REL_BASE_0, reg)
    SETUP_PC_REL_BASE_IMPL \reg, 0, 0f
END_MACRO

MACRO2(SETUP_PC_REL_BASE, reg, label)
    SETUP_PC_REL_BASE_IMPL \reg, \label, \label
END_MACRO

MACRO1(LOAD_RUNTIME_INSTANCE, reg)
    SETUP_PC_REL_BASE_0 \reg
    // Load Runtime::instance_.
    movl SYMBOL(_ZN3art7Runtime9instance_E) - 0b(REG_VAR(reg)), REG_VAR(reg)
END_MACRO
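
// Note on the PC-relative idiom above: `call 0f` pushes the address of local label `0` (the
// popl) and jumps to it, and the popl then leaves that address in `reg`. The displacement
// `SYMBOL(_ZN3art7Runtime9instance_E) - 0b` is a constant fixed at link time, so `disp(reg)`
// addresses Runtime::instance_ itself and the movl loads the Runtime* position-independently.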

// Macros to poison (negate) the reference for heap poisoning.
MACRO1(POISON_HEAP_REF, rRef)
#ifdef USE_HEAP_POISONING
    neg REG_VAR(rRef)
#endif  // USE_HEAP_POISONING
END_MACRO

// Macros to unpoison (negate) the reference for heap poisoning.
MACRO1(UNPOISON_HEAP_REF, rRef)
#ifdef USE_HEAP_POISONING
    neg REG_VAR(rRef)
#endif  // USE_HEAP_POISONING
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly)
     */
MACRO1(SETUP_SAVE_REFS_ONLY_FRAME, temp_reg)
    PUSH edi                      // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    LOAD_RUNTIME_INSTANCE \temp_reg
    // Push save refs only method.
    pushl RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET

    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_REFS_ONLY != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_REFS_ONLY(X86) size not as expected."
#endif
END_MACRO

MACRO0(RESTORE_SAVE_REFS_ONLY_FRAME)
    addl MACRO_LITERAL(16), %esp  // Unwind stack up to saved values.
    CFI_ADJUST_CFA_OFFSET(-16)
    POP ebp                       // Restore callee saves (ebx is saved/restored by the upcall)
    POP esi
    POP edi
END_MACRO

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves)
     */
MACRO1(SETUP_SAVE_ALL_CALLEE_SAVES_FRAME, temp_reg)
    PUSH edi                      // Save callee saves (ebx is saved/restored by the upcall)
    PUSH esi
    PUSH ebp
    subl MACRO_LITERAL(12), %esp  // Grow stack by 3 words.
    CFI_ADJUST_CFA_OFFSET(12)
    LOAD_RUNTIME_INSTANCE \temp_reg
    // Push save all callee-save method.
    pushl RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET(REG_VAR(temp_reg))
    CFI_ADJUST_CFA_OFFSET(4)
    // Store esp as the top quick frame.
    movl %esp, %fs:THREAD_TOP_QUICK_FRAME_OFFSET
    // Ugly compile-time check, but we only have the preprocessor.
    // Last +4: implicit return address pushed on stack when caller made call.
#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 3*4 + 16 + 4)
#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(X86) size not as expected."
#endif
END_MACRO
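
// For reference, both save frames built above are 32 bytes (matching the FRAME_SIZE_* checks)
// and are laid out, from the new esp, roughly as:
//   [esp + 28]  return address pushed by the caller's call
//   [esp + 24]  edi
//   [esp + 20]  esi
//   [esp + 16]  ebp
//   [esp +  4]  12 bytes of alignment padding
//   [esp +  0]  ArtMethod* of the runtime save method (this esp becomes the top quick frame)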

    /*
     * Macro that sets up the callee save frame to conform with
     * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs), except for pushing the method
     */
MACRO0(SETUP_SAVE_REFS_AND_ARGS_FRAME_REGISTERS_ONLY)
    PUSH edi      // Save callee saves
    PUSH esi
    PUSH ebp
    PUSH_ARG ebx  // Save args.
    PUSH_ARG edx
    PUSH_ARG ecx
    // Create space for FPR args.
    INCREASE_FRAME 4 * 8
    // Save FPRs.
    movsd %xmm0, 0(%esp)
    movsd %xmm1, 8(%esp)
    movsd %xmm2, 16(%esp)
    movsd %xmm3, 24(%esp)

    // Ugly compile-time check, but we only have the preprocessor.
    // First +4: implicit return address pushed on stack when caller made call.
    // Last +4: we're not pushing the method on the stack here.
#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 4 + 6*4 + 4*8 + 4)
#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(X86) size not as expected."
#endif
END_MACRO

MACRO0(RESTORE_SAVE_REFS_AND_ARGS_FRAME)
    // Restore FPRs. The method is still on the stack.
    movsd 4(%esp), %xmm0
    movsd 12(%esp), %xmm1
    movsd 20(%esp), %xmm2
    movsd 28(%esp), %xmm3

    DECREASE_FRAME 36  // Remove FPRs and method pointer.

    POP_ARG ecx  // Restore args
    POP_ARG edx
    POP_ARG ebx
    POP ebp      // Restore callee saves
    POP esi
    POP edi
END_MACRO

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_ when the runtime method frame is ready.
     */
MACRO0(DELIVER_PENDING_EXCEPTION_FRAME_READY)
    // Outgoing argument set up
    INCREASE_FRAME 12                                // alignment padding
    pushl %fs:THREAD_SELF_OFFSET                     // pass Thread::Current()
    CFI_ADJUST_CFA_OFFSET(4)
    call SYMBOL(artDeliverPendingExceptionFromCode)  // artDeliverPendingExceptionFromCode(Thread*)
    UNREACHABLE
    CFI_ADJUST_CFA_OFFSET(-16)                       // Reset CFA in case there is more code afterwards.
END_MACRO

    /*
     * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending
     * exception is Thread::Current()->exception_.
     */
MACRO0(DELIVER_PENDING_EXCEPTION)
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME ebx  // save callee saves for throw
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END_MACRO

MACRO0(RETURN_OR_DELIVER_PENDING_EXCEPTION)
    cmpl MACRO_LITERAL(0), %fs:THREAD_EXCEPTION_OFFSET  // exception field == 0 ?
    jne 1f                                              // if exception field != 0 goto 1
    ret                                                 // return
1:                                                      // deliver exception on current thread
    DELIVER_PENDING_EXCEPTION
END_MACRO

// Locking is needed for both managed code and JNI stubs.
MACRO4(LOCK_OBJECT_FAST_PATH, obj, tmp, saved_eax, slow_lock)
1:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax  // EAX := lock word
    movl %fs:THREAD_ID_OFFSET, REG_VAR(tmp)  // tmp: thread id.
    xorl %eax, REG_VAR(tmp)                  // tmp: thread id with count 0 + read barrier bits.
    testl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %eax  // Test the non-gc bits.
    jnz 2f  // Check if unlocked.
    // Unlocked case - store tmp: original lock word plus thread id, preserved read barrier bits.
    // EAX: old val, tmp: new val.
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz 1b  // cmpxchg failed, retry.
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax  // Restore EAX.
    .endif
    ret
2:  // EAX: original lock word, tmp: thread id ^ EAX
    // Check lock word state and thread id together.
    testl LITERAL(LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED), \
          REG_VAR(tmp)
    jne \slow_lock  // Slow path if either of the two fields is non-zero.
    // Increment the recursive lock count.
    leal LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
    testl LITERAL(LOCK_WORD_THIN_LOCK_COUNT_MASK_SHIFTED), REG_VAR(tmp)
    jz \slow_lock  // If count overflowed, go to slow lock.
    // Update lockword for recursive lock, cmpxchg necessary for read barrier bits.
    // EAX: old val, tmp: new val.
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz 1b  // cmpxchg failed, retry.
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax  // Restore EAX.
    .endif
    ret
END_MACRO
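
// Background for the fast paths above and below (see art::LockWord in runtime/lock_word.h for
// the authoritative layout): a thin lock word keeps the owner thread id in its low bits, the
// recursion count above that, and the GC state (read barrier / mark) bits near the top, with
// the two state bits clear. Because the GC may change the GC state bits concurrently, any
// update that has to preserve them is done with lock cmpxchg against the value read into EAX,
// retrying from label 1 if the word changed underneath us.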

// Unlocking is needed for both managed code and JNI stubs.
MACRO4(UNLOCK_OBJECT_FAST_PATH, obj, tmp, saved_eax, slow_unlock)
1:
    movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax  // EAX := lock word
    movl %fs:THREAD_ID_OFFSET, REG_VAR(tmp)  // tmp := thread id
    xorl %eax, REG_VAR(tmp)                  // tmp := thread id ^ lock word
    test LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), REG_VAR(tmp)
    jnz 2f  // Check if simply locked.
    // Transition to unlocked.
#ifndef USE_READ_BARRIER
    movl REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
#else
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz 1b  // cmpxchg failed, retry.
#endif
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax  // Restore EAX.
    .endif
    ret
2:  // EAX: original lock word, tmp: lock_word ^ thread id
    // Check lock word state and thread id together.
    testl LITERAL(LOCK_WORD_STATE_MASK_SHIFTED | LOCK_WORD_THIN_LOCK_OWNER_MASK_SHIFTED), \
          REG_VAR(tmp)
    jnz \slow_unlock
    // Update lockword for recursive unlock, cmpxchg necessary for read barrier bits.
    // tmp: new lock word with decremented count.
    leal -LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
#ifndef USE_READ_BARRIER
    movl REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
#else
    lock cmpxchg REG_VAR(tmp), MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj))
    jnz 1b  // cmpxchg failed, retry.
#endif
    .ifnc \saved_eax, none
        movl REG_VAR(saved_eax), %eax  // Restore EAX.
    .endif
    ret
END_MACRO

#endif  // ART_RUNTIME_ARCH_X86_ASM_SUPPORT_X86_S_