%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    mov     r1, rINST, lsr #12          @ r1<- B
    ubfx    r0, rINST, #8, #4           @ r0<- A
    GET_VREG r3, r1                     @ r3<- vB
    GET_VREG r0, r0                     @ r0<- vA
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    cmp     r0, r3                      @ compare (vA, vB)
    b${condition} 1f
    FETCH_ADVANCE_INST 2                @ not taken: skip this insn and the offset
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction
1:
    @ NOTE(review): rINST already holds the branch offset from the FETCH_S
    @ above, so this re-fetch looks redundant -- confirm before removing.
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    BRANCH

%def zcmp(condition=""):
    /*
     * Generic one-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform.
     *
     * For: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    mov     r0, rINST, lsr #8           @ r0<- AA
    GET_VREG r0, r0                     @ r0<- vAA
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    cmp     r0, #0                      @ compare (vAA, 0)
    b${condition} 1f
    FETCH_ADVANCE_INST 2                @ not taken: skip this insn and the offset
    GET_INST_OPCODE ip                  @ extract opcode from rINST
    GOTO_OPCODE ip                      @ jump to next instruction
1:
    @ NOTE(review): rINST already holds the branch offset from the FETCH_S
    @ above, so this re-fetch looks redundant -- confirm before removing.
    FETCH_S rINST, 1                    @ rINST<- branch offset, in code units
    BRANCH

%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto +AA */
    sbfx    rINST, rINST, #8, #8        @ rINST<- ssssssAA (sign-extended)
    BRANCH

%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 */
    /* goto/16 +AAAA */
    FETCH_S rINST, 1                    @ rINST<- ssssAAAA (sign-extended)
    BRANCH

%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 *
 * Because we need the condition flags set (so the sign of the offset can
 * be tested), we use orrs rather than orr to combine the two halves.
 */
    /* goto/32 +AAAAAAAA */
    FETCH r0, 1                         @ r0<- aaaa (lo)
    FETCH r1, 2                         @ r1<- AAAA (hi)
    orrs    rINST, r0, r1, lsl #16      @ rINST<- AAAAaaaa, flags set
    BRANCH

%def op_if_eq():
%  bincmp(condition="eq")

%def op_if_eqz():
%  zcmp(condition="eq")

%def op_if_ge():
%  bincmp(condition="ge")

%def op_if_gez():
%  zcmp(condition="ge")

%def op_if_gt():
%  bincmp(condition="gt")

%def op_if_gtz():
%  zcmp(condition="gt")

%def op_if_le():
%  bincmp(condition="le")

%def op_if_lez():
%  zcmp(condition="le")

%def op_if_lt():
%  bincmp(condition="lt")

%def op_if_ltz():
%  zcmp(condition="lt")

%def op_if_ne():
%  bincmp(condition="ne")

%def op_if_nez():
%  zcmp(condition="ne")

%def op_packed_switch(func="NterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, so we check for them here.
 *
 * for: packed-switch, sparse-switch
 */
    /* op vAA, +BBBB */
    FETCH r0, 1                         @ r0<- bbbb (lo)
    FETCH r1, 2                         @ r1<- BBBB (hi)
    mov     r3, rINST, lsr #8           @ r3<- AA
    orr     r0, r0, r1, lsl #16         @ r0<- BBBBbbbb
    GET_VREG r1, r3                     @ r1<- vAA
    add     r0, rPC, r0, lsl #1         @ r0<- PC + BBBBbbbb*2 (payload address)
    bl      $func                       @ r0<- code-unit branch offset
    mov     rINST, r0                   @ BRANCH consumes the offset from rINST
    BRANCH

%def op_sparse_switch():
%  op_packed_switch(func="NterpDoSparseSwitch")

/*
 * Return a 32-bit value.
143 */ 144%def op_return(is_object="0", is_void="0", is_wide="0"): 145 .if $is_void 146 // Thread fence for constructor 147 dmb ishst 148 .else 149 mov r2, rINST, lsr #8 @ r2<- AA 150 .if $is_wide 151 VREG_INDEX_TO_ADDR r2, r2 152 GET_VREG_WIDE_BY_ADDR r0, r1, r2 // r0,r1 <- vAA 153 // In case we're going back to compiled code, put the 154 // result also in d0. 155 vmov d0, r0, r1 156 .else 157 GET_VREG r0, r2 // r0<- vAA 158 .if !$is_object 159 // In case we're going back to compiled code, put the 160 // result also in s0. 161 vmov s0, r0 162 .endif 163 .endif 164 .endif 165 .cfi_remember_state 166 ldr ip, [rREFS, #-4] 167 mov sp, ip 168 .cfi_def_cfa sp, CALLEE_SAVES_SIZE 169 RESTORE_ALL_CALLEE_SAVES lr_to_pc=1 170 .cfi_restore_state 171 CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REFS, -4, CALLEE_SAVES_SIZE 172 173%def op_return_object(): 174% op_return(is_object="1", is_void="0", is_wide="0") 175 176%def op_return_void(): 177% op_return(is_object="0", is_void="1", is_wide="0") 178 179%def op_return_wide(): 180% op_return(is_object="0", is_void="0", is_wide="1") 181 182%def op_throw(): 183 EXPORT_PC 184 mov r2, rINST, lsr #8 @ r2<- AA 185 GET_VREG r0, r2 @ r0<- vAA (exception object) 186 mov r1, rSELF 187 bl art_quick_deliver_exception 188 bkpt 0 189