/art/runtime/interpreter/mterp/x86ng/ |
D | arithmetic.S |
    7  movzbl 2(rPC), %eax # eax <- BB
    9  GET_VREG %eax, %eax # eax <- vBB
   14  movl %eax, %edx
   24  cmpl $$0x80000000, %eax
   39  movl %eax, %edx
   42  andl $$0x000000FF, %eax
   59  movzx rINSTbl, %ecx # eax <- BA
   62  GET_VREG %ecx, %ecx # eax <- vBB
   64  GET_VREG %eax, rINST # eax <- vBB
   69  cmpl $$0x80000000, %eax
  [all …]
|
D | other.S |
    6  movl 2(rPC), %eax # grab all 32 bits at once
    7  SET_VREG %eax, rINST # vAA<- eax
   18  movsbl rINSTbl, %eax # eax <-ssssssBx
   20  sarl MACRO_LITERAL(4), %eax
   21  SET_VREG %eax, rINST
   26  movzwl 2(rPC), %eax # eax <- 0000BBBB
   27  sall MACRO_LITERAL(16), %eax # eax <- BBBB0000
   28  SET_VREG %eax, rINST # vAA <- eax
   37  SET_VREG_OBJECT %eax, rINST # vAA <- value
   74  movl 2(rPC), %eax # eax <- lsw
  [all …]
|
D | array.S |
    9  movzbl 2(rPC), %eax # eax <- BB
   11  GET_VREG %eax, %eax # eax <- vBB (array object)
   13  testl %eax, %eax # null array object?
   15  cmpl MIRROR_ARRAY_LENGTH_OFFSET(%eax), %ecx
   18  movq $data_offset(%eax,%ecx,8), %xmm0
   22  testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%eax)
   23  $load $data_offset(%eax,%ecx,$multiplier), %eax
   25  UNPOISON_HEAP_REF eax // Affects flags, so we cannot unpoison before the jnz.
   27  SET_VREG_OBJECT %eax, rINST
   30  UNPOISON_HEAP_REF eax
  [all …]
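The array.S matches above are the aget/aget-object fast path: null-check the array, bounds-check against MIRROR_ARRAY_LENGTH_OFFSET, load the element, and, for object loads under the Baker read barrier, test the gray byte before unpoisoning. A rough C++ rendering follows; every name in it is invented for the sketch, and only the order of the checks mirrors the assembly.

    #include <cstdint>
    #include <cstdlib>

    using ObjRef = uint32_t;  // compressed heap reference

    // With heap reference poisoning, references are stored negated. UNPOISON_HEAP_REF
    // is a single negl, which clobbers EFLAGS - hence the source comment that the
    // unpoison must stay after the jnz on the gray-byte test.
    inline ObjRef Unpoison(ObjRef ref) { return static_cast<ObjRef>(-static_cast<int32_t>(ref)); }

    struct Array {
      uint8_t gray_byte;   // stand-in for the byte tested at GRAY_BYTE_OFFSET
      int32_t length;      // stand-in for MIRROR_ARRAY_LENGTH_OFFSET
      ObjRef  data[8];     // stand-in for the payload at $data_offset
    };

    inline bool IsGray(const Array* a) { return (a->gray_byte & 1) != 0; }  // READ_BARRIER_TEST_VALUE stand-in
    inline ObjRef ReadBarrierSlowPath(ObjRef ref) { return ref; }           // placeholder for the runtime call

    ObjRef AGetObject(const Array* array, int32_t index) {
      if (array == nullptr) {                                  // testl %eax, %eax: null array object?
        std::abort();                                          // would throw NullPointerException
      }
      if (static_cast<uint32_t>(index) >= static_cast<uint32_t>(array->length)) {
        std::abort();                                          // would throw ArrayIndexOutOfBoundsException
      }
      ObjRef ref = array->data[index];                         // $load $data_offset(%eax,%ecx,$multiplier)
      if (IsGray(array)) {                                     // gray object: take the read barrier slow path
        return ReadBarrierSlowPath(Unpoison(ref));
      }
      return Unpoison(ref);                                    // then SET_VREG_OBJECT stores it into vAA
    }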
|
D | object.S |
    6  GET_VREG %eax, rINST
    7  testl %eax, %eax
   11  cmpl MIRROR_OBJECT_CLASS_OFFSET(%eax), %ecx
   21  movl %eax, %ecx
   44  movzbl rINSTbl, %eax
   45  sarl $$4,%eax # eax<- B
   46  GET_VREG %eax %eax # eax<- vB (object)
   47  testl %eax, %eax
   51  cmpl MIRROR_OBJECT_CLASS_OFFSET(%eax), %ecx
   54  movl $$1, %eax
  [all …]
|
D | main.S |
   43  #define ARG0 %eax
   63  #define rNEW_REFS %eax
  217  PUSH_ARG eax
  227  POP_ARG eax
  274  PUSH_ARG eax
  308  movl (%esp), %eax
  309  movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
  318  movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
  345  movl %edx, %eax
  346  sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, %eax
  [all …]
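The movzwl/movw pair on ART_METHOD_HOTNESS_COUNT_OFFSET in the main.S matches is the interpreter's hotness bookkeeping on the method being executed. A hedged sketch of that bookkeeping is below; the field really is a 16-bit counter on ArtMethod, but the counting direction, the helper names, and the hand-off to the JIT are assumptions for illustration, not ART's exact policy.

    #include <cstdint>

    struct FakeArtMethod {
      uint16_t hotness_count;  // stand-in for the field at ART_METHOD_HOTNESS_COUNT_OFFSET
    };

    inline void NotifyJit(FakeArtMethod* /*method*/) {}  // placeholder for the runtime's "method is hot" hook

    inline void CountDownHotness(FakeArtMethod* method) {
      uint16_t count = method->hotness_count;   // movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%eax), %ecx
      if (count != 0) {
        method->hotness_count = count - 1;      // movw %cx, ART_METHOD_HOTNESS_COUNT_OFFSET(%eax)
      } else {
        NotifyJit(method);                      // counter exhausted: hand the method to the JIT
      }
    }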
|
D | invoke.S |
    6  movzwl 2(rPC), %eax // call_site index, first argument of runtime call.
   11  movzwl 2(rPC), %eax // call_site index, first argument of runtime call.
   37  testl MACRO_LITERAL(1), %eax
   39  andl $$-2, %eax // Remove the extra bit that marks it's a String.<init> method.
   93  testl $$3, %eax
   96  movd %eax, %xmm7
   97  movzw ART_METHOD_IMT_INDEX_OFFSET(%eax), %eax
  100  movl (%edx, %eax, 4), %eax
  103  testl $$1, %eax
  110  andl $$-4, %eax
  [all …]
|
D | control_flow.S |
   13  GET_VREG %eax, %ecx # eax <- vA
   14  cmpl VREG_ADDRESS(rINST), %eax # compare (vA, vB)
  117  movl 2(rPC), ARG0 # eax <- BBBBbbbb
  118  leal (rPC,ARG0,2), ARG0 # eax <- PC + BBBBbbbb*2
  126  movl %eax, rINST
  133  GET_VREG %eax, rINST # eax <- vAA
  137  movd %eax, %xmm0
  166  GET_VREG %eax, rINST # eax <- vAA
  182  GET_VREG ARG0, rINST # eax <- vAA (exception object)
|
D | floating_point.S |
   20  movzbl 2(rPC), %eax # eax<- BB
   21  GET_VREG_XMM${suff} %xmm0, %eax
   22  xor %eax, %eax
   28  incl %eax
   32  decl %eax
   34  SET_VREG %eax, rINST
   57  movzbl 3(rPC), %eax # eax <- CC
   60  v${instr}${suff} VREG_ADDRESS(%eax), %xmm0, %xmm0
   65  ${instr}${suff} VREG_ADDRESS(%eax), %xmm0
  177  movzbl 2(rPC), %eax # eax <- CC
  [all …]
|
/art/runtime/arch/x86/ |
D | jni_entrypoints_x86.S |
   30  PUSH_ARG eax
   45  POP_ARG eax
   69  PUSH_ARG eax
   84  POP_ARG eax
   99  movl (%esp), %eax // Thread* self
  100  movl THREAD_TOP_QUICK_FRAME_OFFSET(%eax), %eax // uintptr_t tagged_quick_frame
  101  andl LITERAL(TAGGED_JNI_SP_MASK_TOGGLED32), %eax // ArtMethod** sp
  102  movl (%eax), %eax // ArtMethod* method
  104  ART_METHOD_ACCESS_FLAGS_OFFSET(%eax)
  112  testl %eax, %eax // Check if returned method code is null.
  [all …]
|
D | quick_entrypoints_x86.S |
   76  pushl %eax // Store the ArtMethod reference at the bottom of the stack.
   86  POP eax // Restore Method*
  117  PUSH eax
  193  POP eax
  237  PUSH eax // pass arg1
  247  PUSH eax // alignment padding
  251  PUSH eax // pass arg1
  270  movl (FRAME_SIZE_SAVE_EVERYTHING - 2 * __SIZEOF_POINTER__)(%esp), %eax
  277  PUSH eax // pass arg1
  334  PUSH eax // pass arg1
  [all …]
|
D | memcmp16_x86.S |
   55  movl BLK1(%esp), %eax
   62  add %ecx, %eax
   69  xor %eax, %eax
   78  movdqu (%eax), %xmm3
   80  movl %eax, %edi
  120  xor %eax, %eax
  132  lea (%ecx, %edi,1), %eax
  144  xor %eax, %eax
  178  lea (%ecx, %edi,1), %eax
  190  mov %edx, %eax
  [all …]
|
D | asm_support_x86.S |
  411  movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax // EAX := lock word
  413  xorl %eax, REG_VAR(tmp) // tmp: thread id with count 0 + read barrier bits.
  414  testl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %eax // Test the non-gc bits.
  421  movl REG_VAR(saved_eax), %eax // Restore EAX.
  430  leal LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
  438  movl REG_VAR(saved_eax), %eax // Restore EAX.
  446  movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax // EAX := lock word
  448  xorl %eax, REG_VAR(tmp) // tmp := thread id ^ lock word
  459  movl REG_VAR(saved_eax), %eax // Restore EAX.
  469  leal -LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
  [all …]
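The asm_support_x86.S matches (and the x86_64 twins further down) are the inlined thin-lock fast paths: EAX holds the lock word, it is XORed with the current thread id, the GC-state bits are masked off, and the word is either CAS-installed for a first acquisition or adjusted by LOCK_WORD_THIN_LOCK_COUNT_ONE for a recursive lock/unlock. A schematic C++ version of the lock side is sketched below; the constants and the bit layout are made-up stand-ins, and only the shape of the algorithm is taken from the assembly, with everything else deferred to the runtime's slow path.

    #include <atomic>
    #include <cstdint>

    constexpr uint32_t kNonGcBitsMask  = 0xCFFFFFFFu;  // stand-in for LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED
    constexpr uint32_t kOwnerStateMask = 0xC000FFFFu;  // stand-in: state + owner bits must match for recursion
    constexpr uint32_t kCountOne       = 0x00010000u;  // stand-in for LOCK_WORD_THIN_LOCK_COUNT_ONE
    constexpr uint32_t kCountMask      = 0x0FFF0000u;  // stand-in for the recursion-count field

    struct Object {
      std::atomic<uint32_t> lock_word;  // word at MIRROR_OBJECT_LOCK_WORD_OFFSET
    };

    enum class Result { kDone, kSlowPath };

    Result LockFastPath(Object* obj, uint32_t thread_id) {
      uint32_t lw  = obj->lock_word.load(std::memory_order_relaxed);  // EAX := lock word
      uint32_t tmp = thread_id ^ lw;          // thread id with count 0 + read barrier bits.
      if ((lw & kNonGcBitsMask) == 0) {
        // Unlocked: try to install our thread id while keeping the read barrier bits.
        return obj->lock_word.compare_exchange_strong(lw, tmp, std::memory_order_acquire)
                   ? Result::kDone : Result::kSlowPath;
      }
      if ((tmp & kOwnerStateMask) != 0 || (lw & kCountMask) == kCountMask) {
        return Result::kSlowPath;             // other owner, inflated lock, or count about to overflow
      }
      uint32_t relocked = lw + kCountOne;     // leal LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), tmp
      return obj->lock_word.compare_exchange_strong(lw, relocked, std::memory_order_relaxed)
                 ? Result::kDone : Result::kSlowPath;
    }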
|
/art/runtime/interpreter/mterp/x86_64ng/ |
D | object.S |
   28  movl MIRROR_OBJECT_CLASS_OFFSET(%edi), %eax
   29  UNPOISON_HEAP_REF eax
   33  movl MIRROR_CLASS_SUPER_CLASS_OFFSET(%eax), %eax
   34  UNPOISON_HEAP_REF eax
   35  cmpl %eax, %esi
   37  testl %eax, %eax
   51  movl MIRROR_CLASS_COMPONENT_TYPE_OFFSET(%eax), %eax
   52  UNPOISON_HEAP_REF eax
   54  testl %eax, %eax
   64  cmpw $$0, MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET(%eax)
  [all …]
|
D | arithmetic.S |
    9  GET_WIDE_VREG %rax, %rax # eax <- vBB
   12  GET_VREG %eax, %rax # eax <- vBB
   25  SET_WIDE_VREG $result, rINSTq # eax <- vBB
   27  SET_VREG $result, rINSTq # eax <- vBB
   70  GET_WIDE_VREG %rax, rINSTq # eax <- vA
   73  GET_VREG %eax, rINSTq # eax <- vA
  128  movl rINST, %eax # rax <- 000000BA
  129  sarl $$4, %eax # eax <- B
  130  GET_VREG %eax, %rax # eax <- vB
  137  cdq # rax <- sign-extended of eax
  [all …]
|
D | other.S |
    6  movl 2(rPC), %eax # grab all 32 bits at once
    7  SET_VREG %eax, rINSTq # vAA<- eax
   18  movsbl rINSTbl, %eax # eax <-ssssssBx
   20  sarl MACRO_LITERAL(4), %eax
   21  SET_VREG %eax, rINSTq
   26  movzwl 2(rPC), %eax # eax <- 0000BBBB
   27  sall MACRO_LITERAL(16), %eax # eax <- BBBB0000
   28  SET_VREG %eax, rINSTq # vAA <- eax
   37  SET_VREG_OBJECT %eax, rINSTq # vAA <- value
   86  movslq 2(rPC), %rax # eax <- ssssssssBBBBbbbb
  [all …]
|
D | control_flow.S |
   13  GET_VREG %eax, %rcx # eax <- vA
   14  cmpl VREG_ADDRESS(rINSTq), %eax # compare (vA, vB)
  122  GET_VREG OUT_32_ARG1, rINSTq # eax <- vAA
  124  movslq %eax, rINSTq
  131  GET_VREG %eax, rINSTq # eax <- vAA
  135  movd %eax, %xmm0
  157  GET_WIDE_VREG %rax, rINSTq # eax <- vAA
|
D | array.S |
    9  movzbq 2(rPC), %rax # eax <- BB
   11  GET_VREG %edi, %rax # eax <- vBB (array object)
   23  $load $data_offset(%rdi,%rsi,$shift), %eax
   25  UNPOISON_HEAP_REF eax // Affects flags, so we cannot unpoison before the jnz.
   27  SET_VREG_OBJECT %eax, rINSTq
   30  UNPOISON_HEAP_REF eax
   35  $load $data_offset(%rdi,%rsi,$shift), %eax
   36  SET_VREG %eax, rINSTq
  115  movl rINST, %eax # eax <- BA
  120  andb $$0xf, %al # eax <- A
|
D | invoke.S |
   26  movl (%esi), %eax
   37  testl MACRO_LITERAL(1), %eax
   61  movl (%esi), %eax
   71  movl (%esi), %eax
   91  testl $$3, %eax
   99  testl $$1, %eax
  170  movl %eax, %edi
|
D | main.S |
  428  GET_VREG %eax, %rax
  429  movl %eax, (%rsp)
  440  movzbl 1(rPC), %eax
  443  GET_VREG %eax, %rax
  444  movl %eax, 4(%rsp)
  457  movzbl 1(rPC), %eax
  487  movzbl 1(rPC), %eax
  496  GET_VREG %eax, %rax
  497  movl %eax, (%rsp)
  508  movzbl 1(rPC), %eax
  [all …]
|
D | floating_point.S |
   20  movzbq 2(rPC), %rax # eax<- BB
   22  xor %eax, %eax
   32  movl $$-1, %eax
   34  SET_VREG %eax, rINSTq
   57  movzbq 3(rPC), %rax # eax <- CC
  177  movzbq 2(rPC), %rax # eax <- CC
  210  movzbq 2(rPC), %rax # eax <- CC
|
/art/runtime/arch/x86_64/ |
D | memcmp16_x86_64.S |
  788  xor %eax, %eax
  799  mov -4(%rdi), %eax
  800  cmp %eax, %ecx
  803  xor %eax, %eax
  838  movzwl -2(%rdi), %eax
  842  and $0xffff, %eax
  844  sub %ecx, %eax
  857  xor %eax, %eax
  862  mov -6(%rdi), %eax
  864  cmp %eax, %ecx
  [all …]
|
D | quick_entrypoints_x86_64.S |
   696  movl %eax, %edi // pass the index of the constant as arg0
   699  testl %eax, %eax // If result is null, deliver pending exception.
   769  testl %eax, %eax // eax == 0 ?
   821  movl MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET(%rdi), %eax
   825  cmpl LITERAL(ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE), %eax
   846  movl %eax, (%rcx)
  1245  movl MIRROR_OBJECT_CLASS_OFFSET(%rdx), %eax
  1246  UNPOISON_HEAP_REF eax
  1249  cmpl %eax, %ecx
  1255  movl %eax, %esi // Pass arg2 - type of the value to be stored.
  [all …]
|
D | asm_support_x86_64.S |
  494  movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax // EAX := lock word
  496  xorl %eax, REG_VAR(tmp) // tmp: thread id with count 0 + read barrier bits.
  497  testl LITERAL(LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED), %eax // Test the non-gc bits.
  509  leal LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
  522  movl MIRROR_OBJECT_LOCK_WORD_OFFSET(REG_VAR(obj)), %eax // EAX := lock word
  524  xorl %eax, REG_VAR(tmp) // tmp := thread id ^ lock word
  545  leal -LOCK_WORD_THIN_LOCK_COUNT_ONE(%eax), REG_VAR(tmp)
|
D | jni_entrypoints_x86_64.S |
  454  testl %eax, %eax
|
/art/runtime/ |
D | trace.cc |
  172  uint32_t eax, ebx, ecx;  in GetScalingFactorForX86() local
  173  asm volatile("cpuid" : "=a"(eax), "=b"(ebx), "=c"(ecx) : "a"(0x0), "c"(0));  in GetScalingFactorForX86()
  174  if (eax < 0x15) {  in GetScalingFactorForX86()
  195  asm volatile("cpuid" : "=a"(eax), "=b"(ebx), "=c"(ecx) : "a"(0x15), "c"(0));  in GetScalingFactorForX86()
  204  double scaling_factor = (seconds_to_microseconds * eax) / (coreCrystalFreq * ebx);  in GetScalingFactorForX86()
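The trace.cc hits compute a TSC-to-microseconds scaling factor from CPUID: leaf 0x0 reports the highest supported leaf (hence the eax < 0x15 bail-out), and leaf 0x15 reports the TSC/core-crystal-clock ratio (EAX = denominator, EBX = numerator) plus, on many parts, the crystal frequency in Hz in ECX. A standalone sketch of the same computation is below; the helper name, the negative error return, and the 24 MHz fallback frequency are assumptions for illustration, not what GetScalingFactorForX86() does.

    #include <cstdint>

    static inline void Cpuid(uint32_t leaf, uint32_t* eax, uint32_t* ebx, uint32_t* ecx) {
      uint32_t edx_unused;
      asm volatile("cpuid"
                   : "=a"(*eax), "=b"(*ebx), "=c"(*ecx), "=d"(edx_unused)
                   : "a"(leaf), "c"(0));
    }

    // Returns f such that microseconds = tsc_ticks * f, or a negative value on failure.
    double TscToMicrosecondsFactor() {
      uint32_t eax, ebx, ecx;
      Cpuid(0x0, &eax, &ebx, &ecx);
      if (eax < 0x15) {
        return -1.0;  // CPUID leaf 0x15 not supported.
      }
      Cpuid(0x15, &eax, &ebx, &ecx);
      if (eax == 0 || ebx == 0) {
        return -1.0;  // TSC/crystal ratio not enumerated.
      }
      double crystal_hz = (ecx != 0) ? ecx : 24000000.0;  // Assumed fallback when ECX is 0.
      // TSC frequency = crystal_hz * EBX / EAX, so
      // microseconds per tick = 1e6 * EAX / (crystal_hz * EBX).
      return (1000000.0 * eax) / (crystal_hz * ebx);
    }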
|