1%def header():
2/*
3 * Copyright (C) 2019 The Android Open Source Project
4 *
5 * Licensed under the Apache License, Version 2.0 (the "License");
6 * you may not use this file except in compliance with the License.
7 * You may obtain a copy of the License at
8 *
9 *      http://www.apache.org/licenses/LICENSE-2.0
10 *
11 * Unless required by applicable law or agreed to in writing, software
12 * distributed under the License is distributed on an "AS IS" BASIS,
13 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 * See the License for the specific language governing permissions and
15 * limitations under the License.
16 */
17
18/*
19 * This is a #include, not a %include, because we want the C pre-processor
20 * to expand the macros into assembler assignment statements.
21 */
22#include "asm_support.h"
23#include "arch/x86_64/asm_support_x86_64.S"
24
25/**
26 * x86_64 ABI general notes:
27 *
28 * Caller save set:
29 *    rax, rdx, rcx, rsi, rdi, r8-r11, st(0)-st(7)
30 * Callee save set:
31 *    rbx, rbp, r12-r15
32 * Return regs:
33 *    32-bit in eax
34 *    64-bit in rax
35 *    fp on xmm0
36 *
37 * First 8 fp parameters come in xmm0-xmm7.
38 * First 6 non-fp parameters come in rdi, rsi, rdx, rcx, r8, r9.
39 * Other parameters are passed on the stack, pushed right-to-left.  On entry to the target,
40 * the first stack param is at 8(%rsp).
41 *
42 * Stack must be 16-byte aligned to support SSE in native code.
43 */
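/*
 * Example (standard SysV x86_64 rules, for illustration): for a native call
 * f(int a, long b, float c), 'a' arrives in edi, 'b' in rsi and 'c' in xmm0,
 * and a 32-bit integer result comes back in eax.
 */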
44
45#define IN_ARG3        %rcx
46#define IN_ARG2        %rdx
47#define IN_ARG1        %rsi
48#define IN_ARG0        %rdi
49/* Out Args  */
50#define OUT_ARG3       %rcx
51#define OUT_ARG2       %rdx
52#define OUT_ARG1       %rsi
53#define OUT_ARG0       %rdi
54#define OUT_32_ARG3    %ecx
55#define OUT_32_ARG2    %edx
56#define OUT_32_ARG1    %esi
57#define OUT_32_ARG0    %edi
58#define OUT_FP_ARG1    %xmm1
59#define OUT_FP_ARG0    %xmm0
60
61/*
62 * single-purpose registers, given names for clarity
63 */
64#define rSELF    %gs
65#define rPC      %r12
66#define CFI_DEX  12 // DWARF register number of the register holding dex-pc (rPC).
67#define CFI_TMP  5  // DWARF register number of the first argument register (rdi).
68#define rFP      %r13
69#define rINST    %ebx
70#define rINSTq   %rbx
71#define rINSTw   %bx
72#define rINSTbh  %bh
73#define rINSTbl  %bl
74#define rIBASE   %r14
75#define rREFS    %r15
76#define rREFS32  %r15d
77#define CFI_REFS 15 // DWARF register number of the reference array (r15).
78
79// Temporary registers while setting up a frame.
80#define rNEW_FP   %r8
81#define rNEW_REFS %r9
82#define rNEW_REFS32 %r9d
83#define CFI_NEW_REFS 9
84
85/*
86 * Get/set the 32-bit value from a Dalvik register.
87 */
88#define VREG_ADDRESS(_vreg) (rFP,_vreg,4)
89#define VREG_HIGH_ADDRESS(_vreg) 4(rFP,_vreg,4)
90#define VREG_REF_ADDRESS(_vreg) (rREFS,_vreg,4)
91#define VREG_REF_HIGH_ADDRESS(_vreg) 4(rREFS,_vreg,4)
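// These expand to scaled-index memory operands, e.g. VREG_ADDRESS(%rcx) becomes
// (rFP,%rcx,4): dex register N lives at rFP + 4*N, with a parallel slot at
// rREFS + 4*N that mirrors the value only while the register holds a reference.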
92
93// Includes the return address implicitly pushed on the stack by 'call'.
94#define CALLEE_SAVES_SIZE (6 * 8 + 4 * 8 + 1 * 8)
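// Breakdown (see SPILL_ALL_CALLEE_SAVES below): 6 GPR pushes (rbx, rbp, r12-r15),
// 4 slots presumably written by SETUP_FP_CALLEE_SAVE_FRAME for xmm registers, and
// 1 slot for the return address.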
95
96// +8 for the ArtMethod of the caller.
97#define OFFSET_TO_FIRST_ARGUMENT_IN_STACK (CALLEE_SAVES_SIZE + 8)
98
99/*
100 * Refresh rINST.
101 * On entry to the handler, rINST does not contain the opcode number.
102 * However, some utilities require the full value, so this macro
103 * restores the opcode number.
104 */
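// On handler entry, rINSTbl holds the operand byte (see GOTO_NEXT), so the two
// moves below rebuild the full 16-bit code unit in rINSTw.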
105.macro REFRESH_INST _opnum
106    movb    rINSTbl, rINSTbh
107    movb    $$\_opnum, rINSTbl
108.endm
109
110/*
111 * Fetch the next instruction from rPC into rINSTq (zero-extended).  Does not advance rPC.
112 */
113.macro FETCH_INST
114    movzwq  (rPC), rINSTq
115.endm
116
117/*
118 * Remove opcode from rINST, compute the address of handler and jump to it.
119 */
120.macro GOTO_NEXT
121    movzx   rINSTbl,%ecx
122    movzbl  rINSTbh,rINST
123    shll    MACRO_LITERAL(${handler_size_bits}), %ecx
124    addq    rIBASE, %rcx
125    jmp     *%rcx
126.endm
127
128/*
129 * Advance rPC by instruction count.
130 */
131.macro ADVANCE_PC _count
132    leaq    2*\_count(rPC), rPC
133.endm
134
135/*
136 * Advance rPC by instruction count, fetch instruction and jump to handler.
137 */
138.macro ADVANCE_PC_FETCH_AND_GOTO_NEXT _count
139    ADVANCE_PC \_count
140    FETCH_INST
141    GOTO_NEXT
142.endm
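// Typical handler exit, for illustration: an instruction that occupies 2 code units
// ends with ADVANCE_PC_FETCH_AND_GOTO_NEXT 2, which advances rPC by 4 bytes,
// refetches the code unit and dispatches to the next handler.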
143
144.macro GET_VREG _reg _vreg
145    movl    VREG_ADDRESS(\_vreg), \_reg
146.endm
147
148.macro GET_VREG_OBJECT _reg _vreg
149    movl    VREG_REF_ADDRESS(\_vreg), \_reg
150.endm
151
152/* Read wide value. */
153.macro GET_WIDE_VREG _reg _vreg
154    movq    VREG_ADDRESS(\_vreg), \_reg
155.endm
156
157.macro SET_VREG _reg _vreg
158    movl    \_reg, VREG_ADDRESS(\_vreg)
159    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
160.endm
161
162/* Write wide value. reg is clobbered. */
163.macro SET_WIDE_VREG _reg _vreg
164    movq    \_reg, VREG_ADDRESS(\_vreg)
165    xorq    \_reg, \_reg
166    movq    \_reg, VREG_REF_ADDRESS(\_vreg)
167.endm
168
169.macro SET_VREG_OBJECT _reg _vreg
170    movl    \_reg, VREG_ADDRESS(\_vreg)
171    movl    \_reg, VREG_REF_ADDRESS(\_vreg)
172.endm
173
174.macro GET_VREG_HIGH _reg _vreg
175    movl    VREG_HIGH_ADDRESS(\_vreg), \_reg
176.endm
177
178.macro SET_VREG_HIGH _reg _vreg
179    movl    \_reg, VREG_HIGH_ADDRESS(\_vreg)
180    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
181.endm
182
183.macro CLEAR_REF _vreg
184    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
185.endm
186
187.macro CLEAR_WIDE_REF _vreg
188    movl    MACRO_LITERAL(0), VREG_REF_ADDRESS(\_vreg)
189    movl    MACRO_LITERAL(0), VREG_REF_HIGH_ADDRESS(\_vreg)
190.endm
191
192.macro GET_VREG_XMMs _xmmreg _vreg
193    movss VREG_ADDRESS(\_vreg), \_xmmreg
194.endm
195.macro GET_VREG_XMMd _xmmreg _vreg
196    movsd VREG_ADDRESS(\_vreg), \_xmmreg
197.endm
198.macro SET_VREG_XMMs _xmmreg _vreg
199    movss \_xmmreg, VREG_ADDRESS(\_vreg)
200.endm
201.macro SET_VREG_XMMd _xmmreg _vreg
202    movsd \_xmmreg, VREG_ADDRESS(\_vreg)
203.endm
204
205// An assembly entry that has an OatQuickMethodHeader prefix.
206.macro OAT_ENTRY name, end
207    FUNCTION_TYPE(\name)
208    ASM_HIDDEN SYMBOL(\name)
209    .global SYMBOL(\name)
210    .balign 16
211    // Padding of 3 * 4 bytes to get 16-byte alignment of the code entry.
212    .long 0
213    .long 0
214    .long 0
215    // OatQuickMethodHeader. Note that the top two bits must be clear.
216    .long (SYMBOL(\end) - SYMBOL(\name))
217SYMBOL(\name):
218.endm
219
220.macro ENTRY name
221    .text
222    ASM_HIDDEN SYMBOL(\name)
223    .global SYMBOL(\name)
224    FUNCTION_TYPE(\name)
225SYMBOL(\name):
226.endm
227
228.macro END name
229    SIZE(\name)
230.endm
231
232// Macro for defining entrypoints into the runtime. We don't need to save registers
233// (we're not holding references there), but there is no kDontSave runtime method,
234// so we just use the kSaveRefsOnly runtime method.
235.macro NTERP_TRAMPOLINE name, helper
236DEFINE_FUNCTION \name
237  SETUP_SAVE_REFS_ONLY_FRAME
238  call \helper
239  RESTORE_SAVE_REFS_ONLY_FRAME
240  cmpq LITERAL(0), %gs:THREAD_EXCEPTION_OFFSET
241  jne nterp_deliver_pending_exception
242  ret
243END_FUNCTION \name
244.endm
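// For illustration (hypothetical names): NTERP_TRAMPOLINE nterp_example, NterpExampleHelper
// defines nterp_example, which sets up a kSaveRefsOnly frame, calls NterpExampleHelper,
// and delivers any pending exception before returning.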
245
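// nterp_get_static_field flags volatile fields by setting bit 0 of the returned
// ArtField pointer (see OP_SGET/OP_SPUT below); this macro strips that marker.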
246.macro CLEAR_VOLATILE_MARKER reg
247  andq MACRO_LITERAL(-2), \reg
248.endm
249
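// Publishes the current dex pc into its frame slot at -16(rREFS), just below the
// caller rsp saved by SETUP_STACK_FRAME, so the runtime can observe it.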
250.macro EXPORT_PC
251    movq    rPC, -16(rREFS)
252.endm
253
254
255.macro BRANCH
256    leaq    (rPC, rINSTq, 2), rPC
257    // Update method counter and do a suspend check if the branch is negative or zero.
258    testq rINSTq, rINSTq
259    jle 3f
2602:  // We use 2 and not 1 for this local label because users of the BRANCH macro already have a label 1.
261    FETCH_INST
262    GOTO_NEXT
2633:
264    movq (%rsp), %rdi
265    movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi), %esi
266#if (NTERP_HOTNESS_VALUE != 0)
267#error Expected 0 for hotness value
268#endif
269    // If the counter is at zero, handle this in the runtime.
270    testw %si, %si
271    je NterpHandleHotnessOverflow
272    // Update counter.
273    addl $$-1, %esi
274    movw %si, ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi)
275    DO_SUSPEND_CHECK continue_label=2b
276    jmp 2b
277.endm
278
279// Expects:
280// - r10 and r11 to be available.
281// Outputs:
282// - \registers contains the dex registers size
283// - \outs contains the outs size
284// - if load_ins is 1, \ins contains the ins
285// - \code_item is replaced with a pointer to the instructions
286.macro FETCH_CODE_ITEM_INFO code_item, registers, outs, ins, load_ins
287    testq MACRO_LITERAL(1), \code_item
288    je 5f
289    andq $$-2, \code_item  // Remove the extra bit that marks it as a compact dex file.
290    movzwl COMPACT_CODE_ITEM_FIELDS_OFFSET(\code_item), %r10d
291    movl %r10d, \registers
292    sarl $$COMPACT_CODE_ITEM_REGISTERS_SIZE_SHIFT, \registers
293    andl $$0xf, \registers
294    movl %r10d, \outs
295    sarl $$COMPACT_CODE_ITEM_OUTS_SIZE_SHIFT, \outs
296    andl $$0xf, \outs
297    .if \load_ins
298    movl %r10d, \ins
299    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, \ins
300    andl $$0xf, \ins
301    .else
302    movl %r10d, %r11d
303    sarl $$COMPACT_CODE_ITEM_INS_SIZE_SHIFT, %r11d
304    andl $$0xf, %r11d
305    addl %r11d, \registers
306    .endif
307    testw $$COMPACT_CODE_ITEM_REGISTERS_INS_OUTS_FLAGS, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
308    je 4f
309    movq \code_item, %r11
310    testw $$COMPACT_CODE_ITEM_INSNS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
311    je 1f
312    subq $$4, %r11
3131:
314    testw $$COMPACT_CODE_ITEM_REGISTERS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
315    je 2f
316    subq $$2, %r11
317    movzwl (%r11), %r10d
318    addl %r10d, \registers
3192:
320    testw $$COMPACT_CODE_ITEM_INS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
321    je 3f
322    subq $$2, %r11
323    movzwl (%r11), %r10d
324    .if \load_ins
325    addl %r10d, \ins
326    .else
327    addl %r10d, \registers
328    .endif
3293:
330    testw $$COMPACT_CODE_ITEM_OUTS_FLAG, COMPACT_CODE_ITEM_FLAGS_OFFSET(\code_item)
331    je 4f
332    subq $$2, %r11
333    movzwl (%r11), %r10d
334    addl %r10d, \outs
3354:
336    .if \load_ins
337    addl \ins, \registers
338    .endif
339    addq $$COMPACT_CODE_ITEM_INSNS_OFFSET, \code_item
340    jmp 6f
3415:
342    // Fetch dex register size.
343    movzwl CODE_ITEM_REGISTERS_SIZE_OFFSET(\code_item), \registers
344    // Fetch outs size.
345    movzwl CODE_ITEM_OUTS_SIZE_OFFSET(\code_item), \outs
346    .if \load_ins
347    movzwl CODE_ITEM_INS_SIZE_OFFSET(\code_item), \ins
348    .endif
349    addq $$CODE_ITEM_INSNS_OFFSET, \code_item
3506:
351.endm
352
353// Set up the stack to start executing the method. Expects:
354// - rdi to contain the ArtMethod
355// - rbx, r10, r11 to be available.
356//
357// Outputs
358// - rbx contains the dex registers size
359// - r11 contains the old stack pointer.
360// - \code_item is replaced with a pointer to the instructions
361// - if load_ins is 1, r14 contains the ins
362.macro SETUP_STACK_FRAME code_item, refs, refs32, fp, cfi_refs, load_ins
363    FETCH_CODE_ITEM_INFO \code_item, %ebx, \refs32, %r14d, \load_ins
364
365    // Compute required frame size for dex registers: ((2 * ebx) + refs)
366    leaq (\refs, %rbx, 2), %r11
367    salq $$2, %r11
368
369    // Compute the new stack pointer in r10: reserve 24 bytes for saving the previous
370    // frame, the dex pc, and the method being executed.
371    leaq -24(%rsp), %r10
372    subq %r11, %r10
373    // Alignment
374    // Note: There may be two pieces of alignment but there is no need to align
375    // out args to `kPointerSize` separately before aligning to kStackAlignment.
376    andq $$-16, %r10
377
378    // Set reference and dex registers, align to pointer size for previous frame and dex pc.
379    leaq 24 + 4(%r10, \refs, 4), \refs
380    andq LITERAL(-__SIZEOF_POINTER__), \refs
381    leaq (\refs, %rbx, 4), \fp
382
383    // Now setup the stack pointer.
384    movq %rsp, %r11
385    CFI_DEF_CFA_REGISTER(r11)
386    movq %r10, %rsp
387    movq %r11, -8(\refs)
388    CFI_DEF_CFA_BREG_PLUS_UCONST \cfi_refs, -8, ((6 + 4 + 1) * 8)
389
390    // Put nulls in reference frame.
391    testl %ebx, %ebx
392    je 2f
393    movq \refs, %r10
3941:
395    movl $$0, (%r10)
396    addq $$4, %r10
397    cmpq %r10, \fp
398    jne 1b
3992:
400    // Save the ArtMethod.
401    movq %rdi, (%rsp)
402.endm
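// Resulting frame layout, low to high addresses (a sketch derived from the macro
// above, not an authoritative description):
//   (%rsp)       ArtMethod* of the method being executed
//   8(%rsp)      out args (outs * 4 bytes), then padding
//   -16(\refs)   dex pc slot (written by EXPORT_PC)
//   -8(\refs)    saved caller stack pointer
//   (\refs)      reference array (registers * 4 bytes, zero-initialized above)
//   (\fp)        dex register array (registers * 4 bytes)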
403
404// Puts the next floating point argument into the expected register,
405// fetching values based on a non-range invoke.
406// Uses rax as temporary.
407//
408// TODO: We could simplify a lot of code by loading the G argument into
409// the "inst" register. Given that we enter the handler with "1(rPC)" in
410// the rINST, we can just add rINST<<16 to the args and we don't even
411// need to pass "arg_index" around.
412.macro LOOP_OVER_SHORTY_LOADING_XMMS xmm_reg, inst, shorty, arg_index, finished
4131: // LOOP
414    movb (REG_VAR(shorty)), %al             // al := *shorty
415    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
416    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
417    je VAR(finished)
418    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
419    je 2f
420    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
421    je 3f
422    shrq MACRO_LITERAL(4), REG_VAR(inst)
423    addq MACRO_LITERAL(1), REG_VAR(arg_index)
424    //  Handle extra argument in arg array taken by a long.
425    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
426    jne 1b
427    shrq MACRO_LITERAL(4), REG_VAR(inst)
428    addq MACRO_LITERAL(1), REG_VAR(arg_index)
429    jmp 1b                        // goto LOOP
4302:  // FOUND_DOUBLE
431    subq MACRO_LITERAL(8), %rsp
432    movq REG_VAR(inst), %rax
433    andq MACRO_LITERAL(0xf), %rax
434    GET_VREG %eax, %rax
435    movl %eax, (%rsp)
436    shrq MACRO_LITERAL(4), REG_VAR(inst)
437    addq MACRO_LITERAL(1), REG_VAR(arg_index)
438    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
439    je 5f
440    movq REG_VAR(inst), %rax
441    andq MACRO_LITERAL(0xf), %rax
442    shrq MACRO_LITERAL(4), REG_VAR(inst)
443    addq MACRO_LITERAL(1), REG_VAR(arg_index)
444    jmp 6f
4455:
446    movzbl 1(rPC), %eax
447    andq MACRO_LITERAL(0xf), %rax
4486:
449    GET_VREG %eax, %rax
450    movl %eax, 4(%rsp)
451    movsd (%rsp), REG_VAR(xmm_reg)
452    addq MACRO_LITERAL(8), %rsp
453    jmp 4f
4543:  // FOUND_FLOAT
455    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
456    je 7f
457    movq REG_VAR(inst), %rax
458    andq MACRO_LITERAL(0xf), %rax
459    shrq MACRO_LITERAL(4), REG_VAR(inst)
460    addq MACRO_LITERAL(1), REG_VAR(arg_index)
461    jmp 8f
4627:
463    movzbl 1(rPC), %eax
464    andq MACRO_LITERAL(0xf), %rax
4658:
466    GET_VREG_XMMs REG_VAR(xmm_reg), %rax
4674:
468.endm
469
470// Puts the next int/long/object argument in the expected register,
471// fetching values based on a non-range invoke.
472// Uses rax as temporary.
473.macro LOOP_OVER_SHORTY_LOADING_GPRS gpr_reg64, gpr_reg32, inst, shorty, arg_index, finished
4741: // LOOP
475    movb (REG_VAR(shorty)), %al   // al := *shorty
476    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
477    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
478    je  VAR(finished)
479    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
480    je 2f
481    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
482    je 3f
483    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
484    je 4f
485    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
486    je 7f
487    movq REG_VAR(inst), %rax
488    andq MACRO_LITERAL(0xf), %rax
489    shrq MACRO_LITERAL(4), REG_VAR(inst)
490    addq MACRO_LITERAL(1), REG_VAR(arg_index)
491    jmp 8f
4927:
493    movzbl 1(rPC), %eax
494    andq MACRO_LITERAL(0xf), %rax
4958:
496    GET_VREG REG_VAR(gpr_reg32), %rax
497    jmp 5f
4982:  // FOUND_LONG
499    subq MACRO_LITERAL(8), %rsp
500    movq REG_VAR(inst), %rax
501    andq MACRO_LITERAL(0xf), %rax
502    GET_VREG %eax, %rax
503    movl %eax, (%rsp)
504    shrq MACRO_LITERAL(4), REG_VAR(inst)
505    addq MACRO_LITERAL(1), REG_VAR(arg_index)
506    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
507    je 9f
508    movq REG_VAR(inst), %rax
509    andq MACRO_LITERAL(0xf), %rax
510    shrq MACRO_LITERAL(4), REG_VAR(inst)
511    addq MACRO_LITERAL(1), REG_VAR(arg_index)
512    jmp 10f
5139:
514    movzbl 1(rPC), %eax
515    andq MACRO_LITERAL(0xf), %rax
51610:
517    GET_VREG %eax, %rax
518    movl %eax, 4(%rsp)
519    movq (%rsp), REG_VAR(gpr_reg64)
520    addq MACRO_LITERAL(8), %rsp
521    jmp 5f
5223:  // SKIP_FLOAT
523    shrq MACRO_LITERAL(4), REG_VAR(inst)
524    addq MACRO_LITERAL(1), REG_VAR(arg_index)
525    jmp 1b
5264:  // SKIP_DOUBLE
527    shrq MACRO_LITERAL(4), REG_VAR(inst)
528    addq MACRO_LITERAL(1), REG_VAR(arg_index)
529    cmpq MACRO_LITERAL(4), REG_VAR(arg_index)
530    je 1b
531    shrq MACRO_LITERAL(4), REG_VAR(inst)
532    addq MACRO_LITERAL(1), REG_VAR(arg_index)
533    jmp 1b
5345:
535.endm
536
537// Puts the next floating point argument into the expected register,
538// fetching values based on a range invoke.
539// Uses rax as temporary.
540.macro LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm_reg, shorty, arg_index, stack_index, finished
5411: // LOOP
542    movb (REG_VAR(shorty)), %al             // al := *shorty
543    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
544    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
545    je VAR(finished)
546    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
547    je 2f
548    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
549    je 3f
550    addq MACRO_LITERAL(1), REG_VAR(arg_index)
551    addq MACRO_LITERAL(1), REG_VAR(stack_index)
552    //  Handle extra argument in arg array taken by a long.
553    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
554    jne 1b
555    addq MACRO_LITERAL(1), REG_VAR(arg_index)
556    addq MACRO_LITERAL(1), REG_VAR(stack_index)
557    jmp 1b                        // goto LOOP
5582:  // FOUND_DOUBLE
559    GET_VREG_XMMd REG_VAR(xmm_reg), REG_VAR(arg_index)
560    addq MACRO_LITERAL(2), REG_VAR(arg_index)
561    addq MACRO_LITERAL(2), REG_VAR(stack_index)
562    jmp 4f
5633:  // FOUND_FLOAT
564    GET_VREG_XMMs REG_VAR(xmm_reg), REG_VAR(arg_index)
565    addq MACRO_LITERAL(1), REG_VAR(arg_index)
566    addq MACRO_LITERAL(1), REG_VAR(stack_index)
5674:
568.endm
569
570// Puts the next floating point argument into the expected stack slot,
571// fetching values based on a range invoke.
572// Uses rax as temporary.
573//
574// TODO: We could just copy all the vregs to the stack slots in a simple loop
575// (or REP MOVSD) without looking at the shorty at all. (We could also drop
576// the "stack_index" from the macros for loading registers.) We could also do
577// that conditionally if argument word count > 6; otherwise we know that all
578// args fit into registers.
579.macro LOOP_RANGE_OVER_FPs shorty, arg_index, stack_index, finished
5801: // LOOP
581    movb (REG_VAR(shorty)), %al             // al := *shorty
582    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
583    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
584    je VAR(finished)
585    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
586    je 2f
587    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
588    je 3f
589    addq MACRO_LITERAL(1), REG_VAR(arg_index)
590    addq MACRO_LITERAL(1), REG_VAR(stack_index)
591    //  Handle extra argument in arg array taken by a long.
592    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
593    jne 1b
594    addq MACRO_LITERAL(1), REG_VAR(arg_index)
595    addq MACRO_LITERAL(1), REG_VAR(stack_index)
596    jmp 1b                        // goto LOOP
5972:  // FOUND_DOUBLE
598    movq (rFP, REG_VAR(arg_index), 4), %rax
599    movq %rax, 8(%rsp, REG_VAR(stack_index), 4)
600    addq MACRO_LITERAL(2), REG_VAR(arg_index)
601    addq MACRO_LITERAL(2), REG_VAR(stack_index)
602    jmp 1b
6033:  // FOUND_FLOAT
604    movl (rFP, REG_VAR(arg_index), 4), %eax
605    movl %eax, 8(%rsp, REG_VAR(stack_index), 4)
606    addq MACRO_LITERAL(1), REG_VAR(arg_index)
607    addq MACRO_LITERAL(1), REG_VAR(stack_index)
608    jmp 1b
609.endm
610
611// Puts the next int/long/object argument in the expected register,
612// fetching values based on a range invoke.
613// Uses rax as temporary.
614.macro LOOP_RANGE_OVER_SHORTY_LOADING_GPRS gpr_reg64, gpr_reg32, shorty, arg_index, stack_index, finished
6151: // LOOP
616    movb (REG_VAR(shorty)), %al             // al := *shorty
617    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
618    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
619    je  VAR(finished)
620    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
621    je 2f
622    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
623    je 3f
624    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
625    je 4f
626    movl       (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg32)
627    addq MACRO_LITERAL(1), REG_VAR(arg_index)
628    addq MACRO_LITERAL(1), REG_VAR(stack_index)
629    jmp 5f
6302:  // FOUND_LONG
631    movq (rFP, REG_VAR(arg_index), 4), REG_VAR(gpr_reg64)
632    addq MACRO_LITERAL(2), REG_VAR(arg_index)
633    addq MACRO_LITERAL(2), REG_VAR(stack_index)
634    jmp 5f
6353:  // SKIP_FLOAT
636    addq MACRO_LITERAL(1), REG_VAR(arg_index)
637    addq MACRO_LITERAL(1), REG_VAR(stack_index)
638    jmp 1b
6394:  // SKIP_DOUBLE
640    addq MACRO_LITERAL(2), REG_VAR(arg_index)
641    addq MACRO_LITERAL(2), REG_VAR(stack_index)
642    jmp 1b
6435:
644.endm
645
646// Puts the next int/long/object argument in the expected stack slot,
647// fetching values based on a range invoke.
648// Uses rax as temporary.
649.macro LOOP_RANGE_OVER_INTs shorty, arg_index, stack_index, finished
6501: // LOOP
651    movb (REG_VAR(shorty)), %al             // al := *shorty
652    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
653    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
654    je  VAR(finished)
655    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
656    je 2f
657    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
658    je 3f
659    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
660    je 4f
661    movl (rFP, REG_VAR(arg_index), 4), %eax
662    movl %eax, 8(%rsp, REG_VAR(stack_index), 4)
663    addq MACRO_LITERAL(1), REG_VAR(arg_index)
664    addq MACRO_LITERAL(1), REG_VAR(stack_index)
665    jmp 1b
6662:  // FOUND_LONG
667    movq (rFP, REG_VAR(arg_index), 4), %rax
668    movq %rax, 8(%rsp, REG_VAR(stack_index), 4)
669    addq MACRO_LITERAL(2), REG_VAR(arg_index)
670    addq MACRO_LITERAL(2), REG_VAR(stack_index)
671    jmp 1b
6723:  // SKIP_FLOAT
673    addq MACRO_LITERAL(1), REG_VAR(arg_index)
674    addq MACRO_LITERAL(1), REG_VAR(stack_index)
675    jmp 1b
6764:  // SKIP_DOUBLE
677    addq MACRO_LITERAL(2), REG_VAR(arg_index)
678    addq MACRO_LITERAL(2), REG_VAR(stack_index)
679    jmp 1b
680.endm
681
682// Puts the next floating point parameter passed in a physical register
683// in the expected dex register array entry.
684// Uses rax as temporary.
685.macro LOOP_OVER_SHORTY_STORING_XMMS xmm_reg, shorty, arg_index, fp, finished
6861: // LOOP
687    movb (REG_VAR(shorty)), %al             // al := *shorty
688    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
689    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
690    je VAR(finished)
691    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
692    je 2f
693    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
694    je 3f
695    addq MACRO_LITERAL(1), REG_VAR(arg_index)
696    //  Handle extra argument in arg array taken by a long.
697    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
698    jne 1b
699    addq MACRO_LITERAL(1), REG_VAR(arg_index)
700    jmp 1b                        // goto LOOP
7012:  // FOUND_DOUBLE
702    movsd REG_VAR(xmm_reg),(REG_VAR(fp), REG_VAR(arg_index), 4)
703    addq MACRO_LITERAL(2), REG_VAR(arg_index)
704    jmp 4f
7053:  // FOUND_FLOAT
706    movss REG_VAR(xmm_reg), (REG_VAR(fp), REG_VAR(arg_index), 4)
707    addq MACRO_LITERAL(1), REG_VAR(arg_index)
7084:
709.endm
710
711// Puts the next int/long/object parameter passed in a physical register
712// in the expected dex register array entry, and in case of object in the
713// expected reference array entry.
714// Uses rax as temporary.
715.macro LOOP_OVER_SHORTY_STORING_GPRS gpr_reg64, gpr_reg32, shorty, arg_index, regs, refs, finished
7161: // LOOP
717    movb (REG_VAR(shorty)), %al             // al := *shorty
718    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
719    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
720    je  VAR(finished)
721    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
722    je 2f
723    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
724    je 3f
725    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
726    je 4f
727    movl REG_VAR(gpr_reg32), (REG_VAR(regs), REG_VAR(arg_index), 4)
728    cmpb MACRO_LITERAL(76), %al   // if (al != 'L') goto NOT_REFERENCE
729    jne 6f
730    movl REG_VAR(gpr_reg32), (REG_VAR(refs), REG_VAR(arg_index), 4)
7316:  // NOT_REFERENCE
732    addq MACRO_LITERAL(1), REG_VAR(arg_index)
733    jmp 5f
7342:  // FOUND_LONG
735    movq REG_VAR(gpr_reg64), (REG_VAR(regs), REG_VAR(arg_index), 4)
736    addq MACRO_LITERAL(2), REG_VAR(arg_index)
737    jmp 5f
7383:  // SKIP_FLOAT
739    addq MACRO_LITERAL(1), REG_VAR(arg_index)
740    jmp 1b
7414:  // SKIP_DOUBLE
742    addq MACRO_LITERAL(2), REG_VAR(arg_index)
743    jmp 1b
7445:
745.endm
746
747// Puts the next floating point parameter passed on the stack
748// in the expected dex register array entry.
749// Uses rax as temporary.
750//
751// TODO: Or we could just spill regs to the reserved slots in the caller's
752// frame and copy all regs in a simple loop. This time, however, we would
753// need to look at the shorty anyway to look for the references.
754// (The trade-off is different for passing arguments and receiving them.)
755.macro LOOP_OVER_FPs shorty, arg_index, regs, stack_ptr, finished
7561: // LOOP
757    movb (REG_VAR(shorty)), %al             // al := *shorty
758    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
759    cmpb MACRO_LITERAL(0), %al              // if (al == '\0') goto finished
760    je VAR(finished)
761    cmpb MACRO_LITERAL(68), %al             // if (al == 'D') goto FOUND_DOUBLE
762    je 2f
763    cmpb MACRO_LITERAL(70), %al             // if (al == 'F') goto FOUND_FLOAT
764    je 3f
765    addq MACRO_LITERAL(1), REG_VAR(arg_index)
766    //  Handle extra argument in arg array taken by a long.
767    cmpb MACRO_LITERAL(74), %al   // if (al != 'J') goto LOOP
768    jne 1b
769    addq MACRO_LITERAL(1), REG_VAR(arg_index)
770    jmp 1b                        // goto LOOP
7712:  // FOUND_DOUBLE
772    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %rax
773    movq %rax, (REG_VAR(regs), REG_VAR(arg_index), 4)
774    addq MACRO_LITERAL(2), REG_VAR(arg_index)
775    jmp 1b
7763:  // FOUND_FLOAT
777    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
778    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
779    addq MACRO_LITERAL(1), REG_VAR(arg_index)
780    jmp 1b
781.endm
782
783// Puts the next int/long/object parameter passed on the stack
784// in the expected dex register array entry, and in case of object in the
785// expected reference array entry.
786// Uses rax as temporary.
787.macro LOOP_OVER_INTs shorty, arg_index, regs, refs, stack_ptr, finished
7881: // LOOP
789    movb (REG_VAR(shorty)), %al             // al := *shorty
790    addq MACRO_LITERAL(1), REG_VAR(shorty)  // shorty++
791    cmpb MACRO_LITERAL(0), %al    // if (al == '\0') goto finished
792    je  VAR(finished)
793    cmpb MACRO_LITERAL(74), %al   // if (al == 'J') goto FOUND_LONG
794    je 2f
795    cmpb MACRO_LITERAL(76), %al   // if (al == 'L') goto FOUND_REFERENCE
796    je 6f
797    cmpb MACRO_LITERAL(70), %al   // if (al == 'F') goto SKIP_FLOAT
798    je 3f
799    cmpb MACRO_LITERAL(68), %al   // if (al == 'D') goto SKIP_DOUBLE
800    je 4f
801    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
802    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
803    addq MACRO_LITERAL(1), REG_VAR(arg_index)
804    jmp 1b
8056:  // FOUND_REFERENCE
806    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %eax
807    movl %eax, (REG_VAR(regs), REG_VAR(arg_index), 4)
808    movl %eax, (REG_VAR(refs), REG_VAR(arg_index), 4)
809    addq MACRO_LITERAL(1), REG_VAR(arg_index)
810    jmp 1b
8112:  // FOUND_LONG
812    movq OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_index), 4), %rax
813    movq %rax, (REG_VAR(regs), REG_VAR(arg_index), 4)
814    addq MACRO_LITERAL(2), REG_VAR(arg_index)
815    jmp 1b
8163:  // SKIP_FLOAT
817    addq MACRO_LITERAL(1), REG_VAR(arg_index)
818    jmp 1b
8194:  // SKIP_DOUBLE
820    addq MACRO_LITERAL(2), REG_VAR(arg_index)
821    jmp 1b
822.endm
823
824// Increase method hotness and do a suspend check before starting to execute the method.
825.macro START_EXECUTING_INSTRUCTIONS
826   movq (%rsp), %rdi
827   movzwl ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi), %esi
828#if (NTERP_HOTNESS_VALUE != 0)
829#error Expected 0 for hotness value
830#endif
831   // If the counter is at zero, handle this in the runtime.
832   testl %esi, %esi
833   je 3f
834   // Update counter.
835   addl $$-1, %esi
836   movw %si, ART_METHOD_HOTNESS_COUNT_OFFSET(%rdi)
8371:
838   DO_SUSPEND_CHECK continue_label=2f
8392:
840   FETCH_INST
841   GOTO_NEXT
8423:
843   CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=4f, if_not_hot=1b
8444:
845   movq $$0, %rsi
846   movq rFP, %rdx
847   call nterp_hot_method
848   jmp 2b
849.endm
850
851.macro SPILL_ALL_CALLEE_SAVES
852    PUSH r15
853    PUSH r14
854    PUSH r13
855    PUSH r12
856    PUSH rbp
857    PUSH rbx
858    SETUP_FP_CALLEE_SAVE_FRAME
859.endm
860
861.macro RESTORE_ALL_CALLEE_SAVES
862    RESTORE_FP_CALLEE_SAVE_FRAME
863    POP rbx
864    POP rbp
865    POP r12
866    POP r13
867    POP r14
868    POP r15
869.endm
870
871// Helper to set up the stack after doing an nterp to nterp call. This will set up:
872// - rNEW_FP: the new pointer to dex registers
873// - rNEW_REFS: the new pointer to references
874// - rPC: the new PC pointer to execute
875// - edi: number of arguments
876// - ecx: first dex register
877//
878// This helper expects:
879// - rax to contain the code item
880.macro SETUP_STACK_FOR_INVOKE
881   // We do the same stack overflow check as the compiler. See CanMethodUseNterp
882   // for how we limit the maximum nterp frame size.
883   testq %rax, -STACK_OVERFLOW_RESERVED_BYTES(%rsp)
884
885   // Spill all callee saves to have a consistent stack frame whether we
886   // are called by compiled code or nterp.
887   SPILL_ALL_CALLEE_SAVES
888
889   // Setup the frame.
890   SETUP_STACK_FRAME %rax, rNEW_REFS, rNEW_REFS32, rNEW_FP, CFI_NEW_REFS, load_ins=0
891   // Make r11 point to the top of the dex register array.
892   leaq (rNEW_FP, %rbx, 4), %r11
893
894   // Fetch instruction information before replacing rPC.
895   movzbl 1(rPC), %edi
896   movzwl 4(rPC), %ecx
897
898   // Set the dex pc pointer.
899   movq %rax, rPC
900   CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
901.endm
902
903// Set up arguments based on a non-range nterp to nterp call, and start executing
904// the method. We expect:
905// - rNEW_FP: the new pointer to dex registers
906// - rNEW_REFS: the new pointer to references
907// - rPC: the new PC pointer to execute
908// - edi: number of arguments
909// - ecx: first dex register
910// - r11: top of dex register array
911// - esi: receiver if non-static.
912.macro SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
913   // Now all temporary registers (except r11 containing top of registers array)
914   // are available, copy the parameters.
915   // /* op vA, vB, {vC...vG} */
916   movl %edi, %eax
917   shrl $$4, %eax # Number of arguments
918   jz 6f  # shrl sets the Z flag
919   movq MACRO_LITERAL(-1), %r10
920   cmpl MACRO_LITERAL(2), %eax
921   jl 1f
922   je 2f
923   cmpl MACRO_LITERAL(4), %eax
924   jl 3f
925   je 4f
926
927  // We use a decrementing r10 to store references relative
928  // to rNEW_FP and dex registers relative to r11.
929  //
930  // TODO: We could set up r10 as the number of registers (this can be an additional output from
931  // SETUP_STACK_FOR_INVOKE) and then just decrement it by one before copying each arg to
932  // (rNEW_FP, r10, 4) and (rNEW_REFS, r10, 4).
933  // Maybe even introduce macros NEW_VREG_ADDRESS/NEW_VREG_REF_ADDRESS.
9345:
935   andq        MACRO_LITERAL(15), %rdi
936   GET_VREG_OBJECT %edx, %rdi
937   movl        %edx, (rNEW_FP, %r10, 4)
938   GET_VREG    %edx, %rdi
939   movl        %edx, (%r11, %r10, 4)
940   subq        MACRO_LITERAL(1), %r10
9414:
942   movl        %ecx, %eax
943   shrl        MACRO_LITERAL(12), %eax
944   GET_VREG_OBJECT %edx, %rax
945   movl        %edx, (rNEW_FP, %r10, 4)
946   GET_VREG    %edx, %rax
947   movl        %edx, (%r11, %r10, 4)
948   subq        MACRO_LITERAL(1), %r10
9493:
950   movl        %ecx, %eax
951   shrl        MACRO_LITERAL(8), %eax
952   andl        MACRO_LITERAL(0xf), %eax
953   GET_VREG_OBJECT %edx, %rax
954   movl        %edx, (rNEW_FP, %r10, 4)
955   GET_VREG    %edx, %rax
956   movl        %edx, (%r11, %r10, 4)
957   subq        MACRO_LITERAL(1), %r10
9582:
959   movl        %ecx, %eax
960   shrl        MACRO_LITERAL(4), %eax
961   andl        MACRO_LITERAL(0xf), %eax
962   GET_VREG_OBJECT %edx, %rax
963   movl        %edx, (rNEW_FP, %r10, 4)
964   GET_VREG    %edx, %rax
965   movl        %edx, (%r11, %r10, 4)
966   subq        MACRO_LITERAL(1), %r10
9671:
968   .if \is_string_init
969   // Ignore the first argument
970   .elseif \is_static
971   movl        %ecx, %eax
972   andq        MACRO_LITERAL(0x000f), %rax
973   GET_VREG_OBJECT %edx, %rax
974   movl        %edx, (rNEW_FP, %r10, 4)
975   GET_VREG    %edx, %rax
976   movl        %edx, (%r11, %r10, 4)
977   .else
978   movl        %esi, (rNEW_FP, %r10, 4)
979   movl        %esi, (%r11, %r10, 4)
980   .endif
981
9826:
983   // Start executing the method.
984   movq rNEW_FP, rFP
985   movq rNEW_REFS, rREFS
986   CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REFS, -8, ((6 + 4 + 1) * 8)
987   START_EXECUTING_INSTRUCTIONS
988.endm
989
990// Set up arguments based on a range nterp to nterp call, and start executing
991// the method.
992.macro SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
993   // edi is number of arguments
994   // ecx is first register
995   movq MACRO_LITERAL(-4), %r10
996   .if \is_string_init
997   // Ignore the first argument
998   subl $$1, %edi
999   addl $$1, %ecx
1000   .elseif !\is_static
1001   subl $$1, %edi
1002   addl $$1, %ecx
1003   .endif
1004
1005   testl %edi, %edi
1006   je 2f
1007   leaq  (rREFS, %rcx, 4), %rax  # pointer to first argument in reference array
1008   leaq  (%rax, %rdi, 4), %rax   # pointer to last argument in reference array
1009   leaq  (rFP, %rcx, 4), %rcx    # pointer to first argument in register array
1010   leaq  (%rcx, %rdi, 4), %rdi   # pointer to last argument in register array
1011   // TODO: Same comment for copying arguments as in SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE.
10121:
1013   movl  -4(%rax), %edx
1014   movl  %edx, (rNEW_FP, %r10, 1)
1015   movl  -4(%rdi), %edx
1016   movl  %edx, (%r11, %r10, 1)
1017   subq  MACRO_LITERAL(4), %r10
1018   subq  MACRO_LITERAL(4), %rax
1019   subq  MACRO_LITERAL(4), %rdi
1020   cmpq  %rcx, %rdi
1021   jne 1b
1022
10232:
1024   .if \is_string_init
1025   // Ignore first argument
1026   .elseif !\is_static
1027   movl        %esi, (rNEW_FP, %r10, 1)
1028   movl        %esi, (%r11, %r10, 1)
1029   .endif
1030   movq rNEW_FP, rFP
1031   movq rNEW_REFS, rREFS
1032   CFI_DEF_CFA_BREG_PLUS_UCONST CFI_REFS, -8, ((6 + 4 + 1) * 8)
1033   START_EXECUTING_INSTRUCTIONS
1034.endm
1035
1036.macro GET_SHORTY dest, is_interface, is_polymorphic, is_custom
1037   push %rdi
1038   push %rsi
1039   .if \is_polymorphic
1040   movq 16(%rsp), %rdi
1041   movq rPC, %rsi
1042   call SYMBOL(NterpGetShortyFromInvokePolymorphic)
1043   .elseif \is_custom
1044   movq 16(%rsp), %rdi
1045   movq rPC, %rsi
1046   call SYMBOL(NterpGetShortyFromInvokeCustom)
1047   .elseif \is_interface
1048   movq 16(%rsp), %rdi
1049   movzwl 2(rPC), %esi
1050   call SYMBOL(NterpGetShortyFromMethodId)
1051   .else
1052   call SYMBOL(NterpGetShorty)
1053   .endif
1054   pop %rsi
1055   pop %rdi
1056   movq %rax, \dest
1057.endm
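// Reminder on the shorty format assumed by the loops above: the first character is
// the return type, followed by one character per argument ('L' reference, 'J' long,
// 'D' double, 'F' float, anything else is treated as a 32-bit value); 'J' and 'D'
// occupy two dex registers.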
1058
1059.macro GET_SHORTY_SLOW_PATH dest, is_interface
1060   // Save all registers that can hold arguments in the fast path.
1061   push %rdi
1062   push %rsi
1063   push %rdx
1064   subq MACRO_LITERAL(8), %rsp
1065   mov %xmm0, (%rsp)
1066   .if \is_interface
1067   movq 32(%rsp), %rdi
1068   movzwl 2(rPC), %esi
1069   call SYMBOL(NterpGetShortyFromMethodId)
1070   .else
1071   call SYMBOL(NterpGetShorty)
1072   .endif
1073   mov (%rsp), %xmm0
1074   addq MACRO_LITERAL(8), %rsp
1075   pop %rdx
1076   pop %rsi
1077   pop %rdi
1078   movq %rax, \dest
1079.endm
1080
1081// Uses r9 as temporary.
1082.macro DO_ENTRY_POINT_CHECK call_compiled_code
1083   // On entry, the method is in %rdi and the instance in %rsi.
1084   leaq ExecuteNterpImpl(%rip), %r9
1085   cmpq %r9, ART_METHOD_QUICK_CODE_OFFSET_64(%rdi)
1086   jne  VAR(call_compiled_code)
1087
1088   movq ART_METHOD_DATA_OFFSET_64(%rdi), %rax
1089.endm
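// If the callee's entrypoint is ExecuteNterpImpl, the callee is interpreted as well:
// we fall through with ART_METHOD_DATA_OFFSET_64(%rdi) in %rax, which the callers
// treat as the callee's code item (see SETUP_STACK_FOR_INVOKE). Otherwise we branch
// to the compiled-code path.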
1090
1091// Uses r9 and r10 as temporary
1092.macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
1093   movq rREFS, %r9
1094   movq rFP, %r10
10951:
1096   cmpl (%r9), \old_value
1097   jne 2f
1098   movl \new_value, (%r9)
1099   movl \new_value, (%r10)
11002:
1101   addq $$4, %r9
1102   addq $$4, %r10
1103   cmpq %r9, rFP
1104   jne 1b
1105.endm
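// Rationale: a String.<init> call returns the actual String object, so every dex
// register (and matching reference slot) still holding the uninitialized "this"
// value has to be rewritten to the freshly returned reference.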
1106
1107.macro COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
1108   .if \is_polymorphic
1109   // We always go to compiled code for polymorphic calls.
1110   .elseif \is_custom
1111   // We always go to compiled code for custom calls.
1112   .else
1113     DO_ENTRY_POINT_CHECK .Lcall_compiled_code_\suffix
1114     .if \is_string_init
1115     call nterp_to_nterp_string_init_non_range
1116     .elseif \is_static
1117     call nterp_to_nterp_static_non_range
1118     .else
1119     call nterp_to_nterp_instance_non_range
1120     .endif
1121     jmp .Ldone_return_\suffix
1122   .endif
1123
1124.Lcall_compiled_code_\suffix:
1125   .if \is_polymorphic
1126   // No fast path for polymorphic calls.
1127   .elseif \is_custom
1128   // No fast path for custom calls.
1129   .elseif \is_string_init
1130   // No fast path for string.init.
1131   .else
1132     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1133     je .Lfast_path_with_few_args_\suffix
1134     movzbl 1(rPC), %r9d
1135     movl %r9d, %ebp
1136     shrl MACRO_LITERAL(4), %ebp # Number of arguments
1137     .if \is_static
1138     jz .Linvoke_fast_path_\suffix  # shrl sets the Z flag
1139     .else
1140     cmpl MACRO_LITERAL(1), %ebp
1141     je .Linvoke_fast_path_\suffix
1142     .endif
1143     movzwl 4(rPC), %r11d
1144     cmpl MACRO_LITERAL(2), %ebp
1145     .if \is_static
1146     jl .Lone_arg_fast_path_\suffix
1147     .endif
1148     je .Ltwo_args_fast_path_\suffix
1149     cmpl MACRO_LITERAL(4), %ebp
1150     jl .Lthree_args_fast_path_\suffix
1151     je .Lfour_args_fast_path_\suffix
1152
1153     andl        MACRO_LITERAL(0xf), %r9d
1154     GET_VREG    %r9d, %r9
1155.Lfour_args_fast_path_\suffix:
1156     movl        %r11d, %r8d
1157     shrl        MACRO_LITERAL(12), %r8d
1158     GET_VREG    %r8d, %r8
1159.Lthree_args_fast_path_\suffix:
1160     movl        %r11d, %ecx
1161     shrl        MACRO_LITERAL(8), %ecx
1162     andl        MACRO_LITERAL(0xf), %ecx
1163     GET_VREG    %ecx, %rcx
1164.Ltwo_args_fast_path_\suffix:
1165     movl        %r11d, %edx
1166     shrl        MACRO_LITERAL(4), %edx
1167     andl        MACRO_LITERAL(0xf), %edx
1168     GET_VREG    %edx, %rdx
1169.Lone_arg_fast_path_\suffix:
1170     .if \is_static
1171     andl        MACRO_LITERAL(0xf), %r11d
1172     GET_VREG    %esi, %r11
1173     .else
1174     // First argument already in %esi.
1175     .endif
1176.Linvoke_fast_path_\suffix:
1177     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1178     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1179
1180.Lfast_path_with_few_args_\suffix:
1181     // Fast path when we have zero or one argument (modulo 'this'). If there
1182     // is one argument, we can put it in both a floating point and a core register.
1183     movzbl 1(rPC), %r9d
1184     shrl MACRO_LITERAL(4), %r9d # Number of arguments
1185     .if \is_static
1186     cmpl MACRO_LITERAL(1), %r9d
1187     jl .Linvoke_with_few_args_\suffix
1188     jne .Lget_shorty_\suffix
1189     movzwl 4(rPC), %r9d
1190     andl MACRO_LITERAL(0xf), %r9d  // dex register of first argument
1191     GET_VREG %esi, %r9
1192     movd %esi, %xmm0
1193     .else
1194     cmpl MACRO_LITERAL(2), %r9d
1195     jl .Linvoke_with_few_args_\suffix
1196     jne .Lget_shorty_\suffix
1197     movzwl 4(rPC), %r9d
1198     shrl MACRO_LITERAL(4), %r9d
1199     andl MACRO_LITERAL(0xf), %r9d  // dex register of second argument
1200     GET_VREG %edx, %r9
1201     movd %edx, %xmm0
1202     .endif
1203.Linvoke_with_few_args_\suffix:
1204     // Check if the next instruction is move-result or move-result-wide.
1205     // If it is, we fetch the shorty and jump to the regular invocation.
1206     movzwq  6(rPC), %r9
1207     andl MACRO_LITERAL(0xfe), %r9d
1208     cmpl MACRO_LITERAL(0x0a), %r9d
1209     je .Lget_shorty_and_invoke_\suffix
1210     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1211     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1212.Lget_shorty_and_invoke_\suffix:
1213     .if \is_interface
1214     // Save interface method, used for conflict resolution, in a callee-save register.
1215     movq %rax, %xmm12
1216     .endif
1217     GET_SHORTY_SLOW_PATH rINSTq, \is_interface
1218     jmp .Lgpr_setup_finished_\suffix
1219   .endif
1220
1221.Lget_shorty_\suffix:
1222   .if \is_interface
1223   // Save interface method, used for conflict resolution, in a callee-save register.
1224   movq %rax, %xmm12
1225   .endif
1226   GET_SHORTY rINSTq, \is_interface, \is_polymorphic, \is_custom
1227   // From this point:
1228   // - rINSTq contains shorty (in callee-save to switch over return value after call).
1229   // - rdi contains method
1230   // - rsi contains 'this' pointer for instance method.
1231   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
1232   movzwl 4(rPC), %r11d // arguments
1233   .if \is_string_init
1234   shrq MACRO_LITERAL(4), %r11
1235   movq $$1, %r10       // ignore first argument
1236   .elseif \is_static
1237   movq $$0, %r10       // arg_index
1238   .else
1239   shrq MACRO_LITERAL(4), %r11
1240   movq $$1, %r10       // arg_index
1241   .endif
1242   LOOP_OVER_SHORTY_LOADING_XMMS xmm0, r11, r9, r10, .Lxmm_setup_finished_\suffix
1243   LOOP_OVER_SHORTY_LOADING_XMMS xmm1, r11, r9, r10, .Lxmm_setup_finished_\suffix
1244   LOOP_OVER_SHORTY_LOADING_XMMS xmm2, r11, r9, r10, .Lxmm_setup_finished_\suffix
1245   LOOP_OVER_SHORTY_LOADING_XMMS xmm3, r11, r9, r10, .Lxmm_setup_finished_\suffix
1246   LOOP_OVER_SHORTY_LOADING_XMMS xmm4, r11, r9, r10, .Lxmm_setup_finished_\suffix
1247.Lxmm_setup_finished_\suffix:
1248   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
1249   movzwl 4(rPC), %r11d // arguments
1250   .if \is_string_init
1251   movq $$1, %r10       // ignore first argument
1252   shrq MACRO_LITERAL(4), %r11
1253   LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r9, r10, .Lgpr_setup_finished_\suffix
1254   .elseif \is_static
1255   movq $$0, %r10       // arg_index
1256   LOOP_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r9, r10, .Lgpr_setup_finished_\suffix
1257   .else
1258   shrq MACRO_LITERAL(4), %r11
1259   movq $$1, %r10       // arg_index
1260   .endif
1261   LOOP_OVER_SHORTY_LOADING_GPRS rdx, edx, r11, r9, r10, .Lgpr_setup_finished_\suffix
1262   LOOP_OVER_SHORTY_LOADING_GPRS rcx, ecx, r11, r9, r10, .Lgpr_setup_finished_\suffix
1263   LOOP_OVER_SHORTY_LOADING_GPRS r8, r8d, r11, r9, r10, .Lgpr_setup_finished_\suffix
1264   LOOP_OVER_SHORTY_LOADING_GPRS r9, r9d, r11, r9, r10, .Lgpr_setup_finished_\suffix
1265.Lgpr_setup_finished_\suffix:
1266   .if \is_polymorphic
1267   call SYMBOL(art_quick_invoke_polymorphic)
1268   .elseif \is_custom
1269   call SYMBOL(art_quick_invoke_custom)
1270   .else
1271      .if \is_interface
1272      movq %xmm12, %rax
1273      .endif
1274      call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1275   .endif
1276   cmpb LITERAL(68), (rINSTq)       // Test if result type char == 'D'.
1277   je .Lreturn_double_\suffix
1278   cmpb LITERAL(70), (rINSTq)       // Test if result type char == 'F'.
1279   jne .Ldone_return_\suffix
1280.Lreturn_float_\suffix:
1281   movd %xmm0, %eax
1282   jmp .Ldone_return_\suffix
1283.Lreturn_double_\suffix:
1284   movq %xmm0, %rax
1285.Ldone_return_\suffix:
1286   /* resume execution of caller */
1287   .if \is_string_init
1288   movzwl 4(rPC), %r11d // arguments
1289   andq $$0xf, %r11
1290   GET_VREG %esi, %r11
1291   UPDATE_REGISTERS_FOR_STRING_INIT %esi, %eax
1292   .endif
1293
1294   .if \is_polymorphic
1295   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
1296   .else
1297   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1298   .endif
1299.endm
1300
1301.macro COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="", is_string_init=0, is_polymorphic=0, is_custom=0
1302   .if \is_polymorphic
1303   // We always go to compiled code for polymorphic calls.
1304   .elseif \is_custom
1305   // We always go to compiled code for custom calls.
1306   .else
1307     DO_ENTRY_POINT_CHECK .Lcall_compiled_code_range_\suffix
1308     .if \is_string_init
1309     call nterp_to_nterp_string_init_range
1310     .elseif \is_static
1311     call nterp_to_nterp_static_range
1312     .else
1313     call nterp_to_nterp_instance_range
1314     .endif
1315     jmp .Ldone_return_range_\suffix
1316   .endif
1317
1318.Lcall_compiled_code_range_\suffix:
1319   .if \is_polymorphic
1320   // No fast path for polymorphic calls.
1321   .elseif \is_custom
1322   // No fast path for custom calls.
1323   .elseif \is_string_init
1324   // No fast path for string.init.
1325   .else
1326     testl $$ART_METHOD_NTERP_INVOKE_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1327     je .Lfast_path_with_few_args_range_\suffix
1328     movzbl 1(rPC), %r9d  // number of arguments
1329     .if \is_static
1330     testl %r9d, %r9d
1331     je .Linvoke_fast_path_range_\suffix
1332     .else
1333     cmpl MACRO_LITERAL(1), %r9d
1334     je .Linvoke_fast_path_range_\suffix
1335     .endif
1336     movzwl 4(rPC), %r11d  // dex register of first argument
1337     leaq (rFP, %r11, 4), %r11  // location of first dex register value
1338     cmpl MACRO_LITERAL(2), %r9d
1339     .if \is_static
1340     jl .Lone_arg_fast_path_range_\suffix
1341     .endif
1342     je .Ltwo_args_fast_path_range_\suffix
1343     cmp MACRO_LITERAL(4), %r9d
1344     jl .Lthree_args_fast_path_range_\suffix
1345     je .Lfour_args_fast_path_range_\suffix
1346     cmp MACRO_LITERAL(5), %r9d
1347     je .Lfive_args_fast_path_range_\suffix
1348
1349.Lloop_over_fast_path_range_\suffix:
1350     subl MACRO_LITERAL(1), %r9d
1351     movl (%r11, %r9, 4), %r8d
1352     movl %r8d, 8(%rsp, %r9, 4)  // Add 8 for the ArtMethod
1353     cmpl MACRO_LITERAL(5), %r9d
1354     jne .Lloop_over_fast_path_range_\suffix
1355
1356.Lfive_args_fast_path_range_\suffix:
1357     movl 16(%r11), %r9d
1358.Lfour_args_fast_path_range_\suffix:
1359     movl 12(%r11), %r8d
1360.Lthree_args_fast_path_range_\suffix:
1361     movl 8(%r11), %ecx
1362.Ltwo_args_fast_path_range_\suffix:
1363     movl 4(%r11), %edx
1364.Lone_arg_fast_path_range_\suffix:
1365     .if \is_static
1366     movl 0(%r11), %esi
1367     .else
1368     // First argument already in %esi.
1369     .endif
1370.Linvoke_fast_path_range_\suffix:
1371     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1372     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1373
1374.Lfast_path_with_few_args_range_\suffix:
1375     // Fast path when we have zero or one argument (modulo 'this'). If there
1376     // is one argument, we can put it in both a floating point and a core register.
1377     movzbl 1(rPC), %r9d # Number of arguments
1378     .if \is_static
1379     cmpl MACRO_LITERAL(1), %r9d
1380     jl .Linvoke_with_few_args_range_\suffix
1381     jne .Lget_shorty_range_\suffix
1382     movzwl 4(rPC), %r9d  // Dex register of first argument
1383     GET_VREG %esi, %r9
1384     movd %esi, %xmm0
1385     .else
1386     cmpl MACRO_LITERAL(2), %r9d
1387     jl .Linvoke_with_few_args_range_\suffix
1388     jne .Lget_shorty_range_\suffix
1389     movzwl 4(rPC), %r9d
1390     addl MACRO_LITERAL(1), %r9d  // dex register of second argument
1391     GET_VREG %edx, %r9
1392     movd %edx, %xmm0
1393     .endif
1394.Linvoke_with_few_args_range_\suffix:
1395     // Check if the next instruction is move-result or move-result-wide.
1396     // If it is, we fetch the shorty and jump to the regular invocation.
1397     movzwq  6(rPC), %r9
1398     and MACRO_LITERAL(0xfe), %r9d
1399     cmpl MACRO_LITERAL(0x0a), %r9d
1400     je .Lget_shorty_and_invoke_range_\suffix
1401     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1402     ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1403.Lget_shorty_and_invoke_range_\suffix:
1404     .if \is_interface
1405     // Save interface method, used for conflict resolution, in a callee-save register.
1406     movq %rax, %xmm12
1407     .endif
1408     GET_SHORTY_SLOW_PATH rINSTq, \is_interface
1409     jmp .Lgpr_setup_finished_range_\suffix
1410   .endif
1411
1412.Lget_shorty_range_\suffix:
1413   .if \is_interface
1414   // Save interface method, used for conflict resolution, in a callee-saved register.
1415   movq %rax, %xmm12
1416   .endif
1417   GET_SHORTY rINSTq, \is_interface, \is_polymorphic, \is_custom
1418   // From this point:
1419   // - rINSTq contains shorty (in callee-save to switch over return value after call).
1420   // - rdi contains method
1421   // - rsi contains 'this' pointer for instance method.
1422   leaq 1(rINSTq), %r9  // shorty + 1  ; ie skip return arg character
1423   movzwl 4(rPC), %r10d // arg start index
1424   .if \is_string_init
1425   addq $$1, %r10       // arg start index
1426   movq $$1, %rbp       // index in stack
1427   .elseif \is_static
1428   movq $$0, %rbp       // index in stack
1429   .else
1430   addq $$1, %r10       // arg start index
1431   movq $$1, %rbp       // index in stack
1432   .endif
1433   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm0, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1434   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm1, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1435   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm2, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1436   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm3, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1437   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm4, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1438   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm5, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1439   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm6, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1440   LOOP_RANGE_OVER_SHORTY_LOADING_XMMS xmm7, r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1441   LOOP_RANGE_OVER_FPs r9, r10, rbp, .Lxmm_setup_finished_range_\suffix
1442.Lxmm_setup_finished_range_\suffix:
1443   leaq 1(%rbx), %r11  // shorty + 1  ; ie skip return arg character
1444   movzwl 4(rPC), %r10d // arg start index
1445   .if \is_string_init
1446   addq $$1, %r10       // arg start index
1447   movq $$1, %rbp       // index in stack
1448   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1449   .elseif \is_static
1450   movq $$0, %rbp // index in stack
1451   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rsi, esi, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1452   .else
1453   addq $$1, %r10       // arg start index
1454   movq $$1, %rbp // index in stack
1455   .endif
1456   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rdx, edx, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1457   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS rcx, ecx, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1458   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS r8, r8d, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1459   LOOP_RANGE_OVER_SHORTY_LOADING_GPRS r9, r9d, r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1460   LOOP_RANGE_OVER_INTs r11, r10, rbp, .Lgpr_setup_finished_range_\suffix
1461
1462.Lgpr_setup_finished_range_\suffix:
1463   .if \is_polymorphic
1464   call SYMBOL(art_quick_invoke_polymorphic)
1465   .elseif \is_custom
1466   call SYMBOL(art_quick_invoke_custom)
1467   .else
1468     .if \is_interface
1469     // Set the hidden argument for conflict resolution.
1470     movq %xmm12, %rax
1471     .endif
1472     call *ART_METHOD_QUICK_CODE_OFFSET_64(%rdi) // Call the method.
1473   .endif
1474   cmpb LITERAL(68), (%rbx)       // Test if result type char == 'D'.
1475   je .Lreturn_range_double_\suffix
1476   cmpb LITERAL(70), (%rbx)       // Test if result type char == 'F'.
1477   je .Lreturn_range_float_\suffix
1478   /* resume execution of caller */
1479.Ldone_return_range_\suffix:
1480   .if \is_string_init
1481   movzwl 4(rPC), %r11d // arguments
1482   GET_VREG %esi, %r11
1483   UPDATE_REGISTERS_FOR_STRING_INIT %esi, %eax
1484   .endif
1485
1486   .if \is_polymorphic
1487   ADVANCE_PC_FETCH_AND_GOTO_NEXT 4
1488   .else
1489   ADVANCE_PC_FETCH_AND_GOTO_NEXT 3
1490   .endif
1491.Lreturn_range_double_\suffix:
1492    movq %xmm0, %rax
1493    jmp .Ldone_return_range_\suffix
1494.Lreturn_range_float_\suffix:
1495    movd %xmm0, %eax
1496    jmp .Ldone_return_range_\suffix
1497.endm
1498
1499// Helper for static field get.
1500.macro OP_SGET load="movl", wide="0"
1501   // Fast-path which gets the field from thread-local cache.
1502%  fetch_from_thread_cache("%rax", miss_label="2f")
15031:
1504   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
1505   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
1506   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1507   jne 3f
15084:
1509   .if \wide
1510   movq (%eax,%edx,1), %rax
1511   SET_WIDE_VREG %rax, rINSTq              # fp[A] <- value
1512   .else
1513   \load (%eax, %edx, 1), %eax
1514   SET_VREG %eax, rINSTq            # fp[A] <- value
1515   .endif
1516   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15172:
1518   EXPORT_PC
1519   movq rSELF:THREAD_SELF_OFFSET, %rdi
1520   movq 0(%rsp), %rsi
1521   movq rPC, %rdx
1522   movq $$0, %rcx
1523   call nterp_get_static_field
1524   // Clear the marker that we put for volatile fields. The x86 memory
1525   // model doesn't require a barrier.
1526   andq $$-2, %rax
1527   jmp 1b
15283:
1529   call art_quick_read_barrier_mark_reg00
1530   jmp 4b
1531.endm
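// The sget family of handlers is expected to instantiate OP_SGET with the matching
// load instruction (e.g. a zero- or sign-extending move for the narrow variants)
// and wide=1 for 64-bit fields; the concrete instantiations live with the opcode
// definitions rather than in this header.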
1532
1533// Helper for static field put.
1534.macro OP_SPUT rINST_reg="rINST", store="movl", wide="0":
1535   // Fast-path which gets the field from thread-local cache.
1536%  fetch_from_thread_cache("%rax", miss_label="2f")
15371:
1538   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
1539   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
1540   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1541   jne 3f
15424:
1543   .if \wide
1544   GET_WIDE_VREG rINSTq, rINSTq           # rINST <- v[A]
1545   .else
1546   GET_VREG rINST, rINSTq                  # rINST <- v[A]
1547   .endif
1548   \store    \rINST_reg, (%rax,%rdx,1)
1549   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15502:
1551   EXPORT_PC
1552   movq rSELF:THREAD_SELF_OFFSET, %rdi
1553   movq 0(%rsp), %rsi
1554   movq rPC, %rdx
1555   movq $$0, %rcx
1556   call nterp_get_static_field
1557   testq MACRO_LITERAL(1), %rax
1558   je 1b
1559   // Clear the marker that we put for volatile fields. The x86 memory
1560   // model doesn't require a barrier.
1561   CLEAR_VOLATILE_MARKER %rax
1562   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
1563   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
1564   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1565   jne 6f
15665:
1567   .if \wide
1568   GET_WIDE_VREG rINSTq, rINSTq           # rINST <- v[A]
1569   .else
1570   GET_VREG rINST, rINSTq                  # rINST <- v[A]
1571   .endif
1572   \store    \rINST_reg, (%rax,%rdx,1)
1573   lock addl $$0, (%rsp)
1574   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
15753:
1576   call art_quick_read_barrier_mark_reg00
1577   jmp 4b
15786:
1579   call art_quick_read_barrier_mark_reg00
1580   jmp 5b
1581.endm
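/*
 * Illustrative sketch (comments only, not assembled): OP_SPUT mirrors OP_SGET,
 * but a volatile store must also be followed by a full fence, which the code
 * gets with "lock addl $0, (%rsp)". Rough C-like pseudocode, hypothetical names:
 *
 *   ArtField* field = thread_cache_lookup(self, dex_pc);
 *   if (field == NULL) field = NterpGetStaticField(self, method, dex_pc, 0);
 *   bool is_volatile = (uintptr_t)field & 1;
 *   field = (ArtField*)((uintptr_t)field & ~1);         // CLEAR_VOLATILE_MARKER
 *   *(T*)((char*)field->declaring_class + field->offset) = vregs[A];
 *   if (is_volatile) full_memory_fence();               // lock addl $0, (%rsp)
 */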
1582
1583
1584.macro OP_IPUT_INTERNAL rINST_reg="rINST", store="movl", wide="0":
1585   movzbq  rINSTbl, %rcx                   # rcx <- BA
1586   sarl    $$4, %ecx                       # ecx <- B
1587   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
1588   testl   %ecx, %ecx                      # is object null?
1589   je      common_errNullObject
1590   andb    $$0xf, rINSTbl                  # rINST <- A
1591   .if \wide
1592   GET_WIDE_VREG rINSTq, rINSTq              # rINSTq <- fp[A]/fp[A+1]
1593   .else
1594   GET_VREG rINST, rINSTq                  # rINST <- v[A]
1595   .endif
1596   \store \rINST_reg, (%rcx,%rax,1)
1597.endm
1598
1599// Helper for instance field put.
1600.macro OP_IPUT rINST_reg="rINST", store="movl", wide="0":
1601   // Fast-path which gets the field from thread-local cache.
1602%  fetch_from_thread_cache("%rax", miss_label="2f")
16031:
1604   OP_IPUT_INTERNAL \rINST_reg, \store, \wide
1605   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
16062:
1607   EXPORT_PC
1608   movq rSELF:THREAD_SELF_OFFSET, %rdi
1609   movq 0(%rsp), %rsi
1610   movq rPC, %rdx
1611   movq $$0, %rcx
1612   call nterp_get_instance_field_offset
1613   testl %eax, %eax
1614   jns 1b
1615   negl %eax
1616   OP_IPUT_INTERNAL \rINST_reg, \store, \wide
1617   lock addl $$0, (%rsp)
1618   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
1619.endm
1620
1621// Helper for instance field get.
1622.macro OP_IGET load="movl", wide="0"
1623   // Fast-path which gets the field from thread-local cache.
1624%  fetch_from_thread_cache("%rax", miss_label="2f")
16251:
1626   movl    rINST, %ecx                     # rcx <- BA
1627   sarl    $$4, %ecx                       # ecx <- B
1628   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
1629   testl   %ecx, %ecx                      # is object null?
1630   je      common_errNullObject
1631   andb    $$0xf,rINSTbl                   # rINST <- A
1632   .if \wide
1633   movq (%rcx,%rax,1), %rax
1634   SET_WIDE_VREG %rax, rINSTq              # fp[A] <- value
1635   .else
1636   \load (%rcx,%rax,1), %eax
1637   SET_VREG %eax, rINSTq                   # fp[A] <- value
1638   .endif
1639   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
16402:
1641   EXPORT_PC
1642   movq rSELF:THREAD_SELF_OFFSET, %rdi
1643   movq 0(%rsp), %rsi
1644   movq rPC, %rdx
1645   movq $$0, %rcx
1646   call nterp_get_instance_field_offset
1647   testl %eax, %eax
1648   jns 1b
1649   negl %eax
1650   jmp 1b
1651.endm
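/*
 * Illustrative sketch (comments only, not assembled): for instance fields the
 * runtime returns the field offset directly, and encodes "volatile" by negating
 * it. Rough C-like pseudocode, hypothetical names, covering both OP_IGET and
 * OP_IPUT above:
 *
 *   int32_t off = nterp_get_instance_field_offset(...); // negative if volatile
 *   if (off < 0) off = -off;        // x86 loads are already acquire-ordered
 *   uint32_t obj = vregs[B];
 *   if (obj == 0) goto throw_null_pointer_exception;
 *   // iget: vregs[A] = *(T*)((char*)obj + off);
 *   // iput: *(T*)((char*)obj + off) = vregs[A];
 *   //       followed by a full fence ("lock addl $0, (%rsp)") when volatile.
 */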
1652
1653.macro SETUP_REFERENCE_PARAMETER_IN_GPR gpr32, regs, refs, ins, arg_offset, finished
1654    movl REG_VAR(gpr32), (REG_VAR(regs), REG_VAR(arg_offset))
1655    movl REG_VAR(gpr32), (REG_VAR(refs), REG_VAR(arg_offset))
1656    addq MACRO_LITERAL(4), REG_VAR(arg_offset)
1657    subl MACRO_LITERAL(1), REG_VAR(ins)
1658    je \finished
1659.endm
1660
1661// Uses eax as temporary
1662.macro SETUP_REFERENCE_PARAMETERS_IN_STACK regs, refs, ins, stack_ptr, arg_offset
16631:
1664    movl OFFSET_TO_FIRST_ARGUMENT_IN_STACK(REG_VAR(stack_ptr), REG_VAR(arg_offset)), %eax
1665    movl %eax, (REG_VAR(regs), REG_VAR(arg_offset))
1666    movl %eax, (REG_VAR(refs), REG_VAR(arg_offset))
1667    addq MACRO_LITERAL(4), REG_VAR(arg_offset)
1668    subl MACRO_LITERAL(1), REG_VAR(ins)
1669    jne 1b
1670.endm
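/*
 * Illustrative sketch (comments only, not assembled): the two helpers above
 * implement the "all parameters are references" fast path. Each incoming 32-bit
 * argument is stored twice, once into the dex register (fp) array and once into
 * the parallel reference array, so the GC can see it. Rough C-like pseudocode,
 * hypothetical names:
 *
 *   for (uint32_t i = 0; i < num_ins; ++i) {
 *     uint32_t ref = (i < num_gpr_args)
 *         ? gpr_arg(i)                                          // rsi, rdx, rcx, r8, r9
 *         : stack_arg(OFFSET_TO_FIRST_ARGUMENT_IN_STACK + 4 * i);
 *     regs[i] = ref;   // dex register array
 *     refs[i] = ref;   // reference array scanned by the GC
 *   }
 */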
1671
1672.macro CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot, if_not_hot
1673    testl $$ART_METHOD_IS_MEMORY_SHARED_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1674    jz \if_hot
1675    movzwl rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET, %esi
1676    testl %esi, %esi
1677    je \if_hot
1678    addl $$-1, %esi
1679    movw %si, rSELF:THREAD_SHARED_METHOD_HOTNESS_OFFSET
1680    jmp \if_not_hot
1681.endm
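/*
 * Illustrative sketch (comments only, not assembled): for methods whose ArtMethod
 * lives in shared memory, the hotness counter cannot be written into the method
 * itself, so a per-thread counter is decremented instead. Rough C-like
 * pseudocode, hypothetical names:
 *
 *   if (!(method->access_flags & kAccMemorySharedMethod)) goto if_hot;
 *   uint16_t hotness = self->shared_method_hotness;
 *   if (hotness == 0) goto if_hot;                // counter exhausted: treat as hot
 *   self->shared_method_hotness = hotness - 1;
 *   goto if_not_hot;
 */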
1682
1683.macro DO_SUSPEND_CHECK continue_label
1684    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), rSELF:THREAD_FLAGS_OFFSET
1685    jz      \continue_label
1686    EXPORT_PC
1687    call    SYMBOL(art_quick_test_suspend)
1688.endm
1689
1690%def entry():
1691/*
1692 * ArtMethod entry point.
1693 *
1694 * On entry:
1695 *  rdi   ArtMethod* callee
1696 *  rest  method parameters
1697 */
1698
1699OAT_ENTRY ExecuteNterpWithClinitImpl, EndExecuteNterpWithClinitImpl
1700    // For simplicity, we don't do a read barrier here, but instead rely
1701    // on art_quick_resolution_trampoline to always have a suspend point before
1702    // calling back here.
1703    movl ART_METHOD_DECLARING_CLASS_OFFSET(%rdi), %r10d
1704    cmpb  $$(MIRROR_CLASS_IS_VISIBLY_INITIALIZED_VALUE), MIRROR_CLASS_IS_VISIBLY_INITIALIZED_OFFSET(%r10d)
1705    jae ExecuteNterpImpl
1706    cmpb  $$(MIRROR_CLASS_IS_INITIALIZING_VALUE), MIRROR_CLASS_IS_VISIBLY_INITIALIZED_OFFSET(%r10d)
1707    jb art_quick_resolution_trampoline
1708    movl MIRROR_CLASS_CLINIT_THREAD_ID_OFFSET(%r10d), %r10d
1709    cmpl %r10d, rSELF:THREAD_TID_OFFSET
1710    je ExecuteNterpImpl
1711    jmp art_quick_resolution_trampoline
1712EndExecuteNterpWithClinitImpl:
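/*
 * Illustrative sketch (comments only, not assembled): the clinit entry point
 * above only lets a method execute in nterp once its declaring class is safe to
 * use. Rough C-like pseudocode, hypothetical names:
 *
 *   mirror::Class* klass = method->declaring_class;
 *   if (klass->status >= kVisiblyInitialized) goto ExecuteNterpImpl;
 *   if (klass->status <  kInitializing)       goto art_quick_resolution_trampoline;
 *   // Initializing: only the thread running <clinit> may proceed.
 *   if (klass->clinit_thread_id == self->tid)  goto ExecuteNterpImpl;
 *   goto art_quick_resolution_trampoline;
 */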
1713
1714OAT_ENTRY ExecuteNterpImpl, EndExecuteNterpImpl
1715    .cfi_startproc
1716    .cfi_def_cfa rsp, 8
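    // Implicit stack-overflow check: this read probes below the reserved region
    // and faults if the stack cannot provide it; the runtime's fault handler
    // turns the fault into a StackOverflowError.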
1717    testq %rax, -STACK_OVERFLOW_RESERVED_BYTES(%rsp)
1718    /* Spill callee save regs */
1719    SPILL_ALL_CALLEE_SAVES
1720
1721    movq ART_METHOD_DATA_OFFSET_64(%rdi), rPC
1722
1723    // Setup the stack for executing the method.
1724    SETUP_STACK_FRAME rPC, rREFS, rREFS32, rFP, CFI_REFS, load_ins=1
1725
1726    // Setup the parameters
1727    testl %r14d, %r14d
1728    je .Lxmm_setup_finished
1729
1730    subq %r14, %rbx
1731    salq $$2, %rbx // rbx is now the offset for inputs into the registers array.
1732
1733    testl $$ART_METHOD_NTERP_ENTRY_POINT_FAST_PATH_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1734    je .Lsetup_slow_path
1735    leaq (rFP, %rbx, 1), %rdi
1736    leaq (rREFS, %rbx, 1), %rbx
1737    movq $$0, %r10
1738
1739    SETUP_REFERENCE_PARAMETER_IN_GPR esi, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1740    SETUP_REFERENCE_PARAMETER_IN_GPR edx, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1741    SETUP_REFERENCE_PARAMETER_IN_GPR ecx, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1742    SETUP_REFERENCE_PARAMETER_IN_GPR r8d, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1743    SETUP_REFERENCE_PARAMETER_IN_GPR r9d, rdi, rbx, r14d, r10, .Lxmm_setup_finished
1744    SETUP_REFERENCE_PARAMETERS_IN_STACK rdi, rbx, r14d, r11, r10
1745    jmp .Lxmm_setup_finished
1746
1747.Lsetup_slow_path:
1748    // If the method is not static and there is one argument ('this'), we don't need to fetch the
1749    // shorty.
1750    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1751    jne .Lsetup_with_shorty
1752
1753    movl %esi, (rFP, %rbx)
1754    movl %esi, (rREFS, %rbx)
1755
1756    cmpl $$1, %r14d
1757    je .Lxmm_setup_finished
1758
1759.Lsetup_with_shorty:
1760    // TODO: Get shorty in a better way and remove below
1761    push %rdi
1762    push %rsi
1763    push %rdx
1764    push %rcx
1765    push %r8
1766    push %r9
1767
1768    // Save xmm registers + alignment.
1769    subq MACRO_LITERAL(8 * 8 + 8), %rsp
1770    movq %xmm0, 0(%rsp)
1771    movq %xmm1, 8(%rsp)
1772    movq %xmm2, 16(%rsp)
1773    movq %xmm3, 24(%rsp)
1774    movq %xmm4, 32(%rsp)
1775    movq %xmm5, 40(%rsp)
1776    movq %xmm6, 48(%rsp)
1777    movq %xmm7, 56(%rsp)
1778
1779    call SYMBOL(NterpGetShorty)
1780    // Save shorty in callee-save rbp.
1781    movq %rax, %rbp
1782
1783    // Restore xmm registers + alignment.
1784    movq 0(%rsp), %xmm0
1785    movq 8(%rsp), %xmm1
1786    movq 16(%rsp), %xmm2
1787    movq 24(%rsp), %xmm3
1788    movq 32(%rsp), %xmm4
1789    movq 40(%rsp), %xmm5
1790    movq 48(%rsp), %xmm6
1791    movq 56(%rsp), %xmm7
1792    addq MACRO_LITERAL(8 * 8 + 8), %rsp
1793
1794    pop %r9
1795    pop %r8
1796    pop %rcx
1797    pop %rdx
1798    pop %rsi
1799    pop %rdi
1800    // Reload the old stack pointer; it was stored in %r11, which is not callee-saved.
1801    movq -8(rREFS), %r11
1802    // TODO: Get shorty in a better way and remove above
1803
1804    movq $$0, %r14
1805    testl $$ART_METHOD_IS_STATIC_FLAG, ART_METHOD_ACCESS_FLAGS_OFFSET(%rdi)
1806
1807    // Available: rdi, r10
1808    // Note: the leaq instructions below don't change the flags.
1809    leaq 1(%rbp), %r10  // shorty + 1, i.e. skip the return type character
1810    leaq (rFP, %rbx, 1), %rdi
1811    leaq (rREFS, %rbx, 1), %rbx
1812    jne .Lhandle_static_method
1813    addq $$4, %rdi
1814    addq $$4, %rbx
1815    addq $$4, %r11
1816    jmp .Lcontinue_setup_gprs
1817.Lhandle_static_method:
1818    LOOP_OVER_SHORTY_STORING_GPRS rsi, esi, r10, r14, rdi, rbx, .Lgpr_setup_finished
1819.Lcontinue_setup_gprs:
1820    LOOP_OVER_SHORTY_STORING_GPRS rdx, edx, r10, r14, rdi, rbx, .Lgpr_setup_finished
1821    LOOP_OVER_SHORTY_STORING_GPRS rcx, ecx, r10, r14, rdi, rbx, .Lgpr_setup_finished
1822    LOOP_OVER_SHORTY_STORING_GPRS r8, r8d, r10, r14, rdi, rbx, .Lgpr_setup_finished
1823    LOOP_OVER_SHORTY_STORING_GPRS r9, r9d, r10, r14, rdi, rbx, .Lgpr_setup_finished
1824    LOOP_OVER_INTs r10, r14, rdi, rbx, r11, .Lgpr_setup_finished
1825.Lgpr_setup_finished:
1826    leaq 1(%rbp), %r10  // shorty + 1, i.e. skip the return type character
1827    movq $$0, %r14 // reset counter
1828    LOOP_OVER_SHORTY_STORING_XMMS xmm0, r10, r14, rdi, .Lxmm_setup_finished
1829    LOOP_OVER_SHORTY_STORING_XMMS xmm1, r10, r14, rdi, .Lxmm_setup_finished
1830    LOOP_OVER_SHORTY_STORING_XMMS xmm2, r10, r14, rdi, .Lxmm_setup_finished
1831    LOOP_OVER_SHORTY_STORING_XMMS xmm3, r10, r14, rdi, .Lxmm_setup_finished
1832    LOOP_OVER_SHORTY_STORING_XMMS xmm4, r10, r14, rdi, .Lxmm_setup_finished
1833    LOOP_OVER_SHORTY_STORING_XMMS xmm5, r10, r14, rdi, .Lxmm_setup_finished
1834    LOOP_OVER_SHORTY_STORING_XMMS xmm6, r10, r14, rdi, .Lxmm_setup_finished
1835    LOOP_OVER_SHORTY_STORING_XMMS xmm7, r10, r14, rdi, .Lxmm_setup_finished
1836    LOOP_OVER_FPs r10, r14, rdi, r11, .Lxmm_setup_finished
1837.Lxmm_setup_finished:
1838    CFI_DEFINE_DEX_PC_WITH_OFFSET(CFI_TMP, CFI_DEX, 0)
1839
1840    // Set rIBASE
1841    leaq artNterpAsmInstructionStart(%rip), rIBASE
1842    /* start executing the instruction at rPC */
1843    START_EXECUTING_INSTRUCTIONS
1844    /* NOTE: no fallthrough */
1845    // cfi info continues, and covers the whole nterp implementation.
1846    END ExecuteNterpImpl
1847
1848%def opcode_pre():
1849
1850%def fetch_from_thread_cache(dest_reg, miss_label):
1851   // Fetch some information from the thread cache.
1852   // Uses rax, rdx, rcx as temporaries.
1853   movq rSELF:THREAD_SELF_OFFSET, %rax
1854   movq rPC, %rdx
1855   salq MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_SHIFT), %rdx
1856   andq MACRO_LITERAL(THREAD_INTERPRETER_CACHE_SIZE_MASK), %rdx
1857   cmpq THREAD_INTERPRETER_CACHE_OFFSET(%rax, %rdx, 1), rPC
1858   jne ${miss_label}
1859   movq __SIZEOF_POINTER__+THREAD_INTERPRETER_CACHE_OFFSET(%rax, %rdx, 1), ${dest_reg}
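/*
 * Illustrative sketch (comments only, not assembled): the thread-local
 * interpreter cache is a small direct-mapped table of (dex_pc, value) pairs in
 * the Thread object. The slot is derived from rPC with a shift and mask, the key
 * is compared, and a mismatch takes the caller's miss label. Rough C-like
 * pseudocode, hypothetical names:
 *
 *   struct Entry { const uint16_t* dex_pc; uintptr_t value; };
 *   Entry* entry = &self->interpreter_cache[slot_of(dex_pc)];
 *   if (entry->dex_pc != dex_pc) goto miss_label;   // slow path refills the cache
 *   dest = entry->value;                            // ArtField*, ArtMethod*, offset, ...
 */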
1860
1861%def footer():
1862/*
1863 * ===========================================================================
1864 *  Common subroutines and data
1865 * ===========================================================================
1866 */
1867
1868    .text
1869    .align  2
1870
1871// Enclose all code below in a symbol (which gets printed in backtraces).
1872ENTRY nterp_helper
1873
1874// Note: mterp also uses the common_* names below for helpers, but that's OK
1875// as the C compiler compiles each interpreter separately.
1876common_errDivideByZero:
1877    EXPORT_PC
1878    call art_quick_throw_div_zero
1879
1880// Expect array in edi, index in esi.
1881common_errArrayIndex:
1882    EXPORT_PC
1883    movl MIRROR_ARRAY_LENGTH_OFFSET(%edi), %eax
1884    movl %esi, %edi
1885    movl %eax, %esi
1886    call art_quick_throw_array_bounds
1887
1888common_errNullObject:
1889    EXPORT_PC
1890    call art_quick_throw_null_pointer_exception
1891
1892NterpCommonInvokeStatic:
1893    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, suffix="invokeStatic"
1894
1895NterpCommonInvokeStaticRange:
1896    COMMON_INVOKE_RANGE is_static=1, is_interface=0, suffix="invokeStatic"
1897
1898NterpCommonInvokeInstance:
1899    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, suffix="invokeInstance"
1900
1901NterpCommonInvokeInstanceRange:
1902    COMMON_INVOKE_RANGE is_static=0, is_interface=0, suffix="invokeInstance"
1903
1904NterpCommonInvokeInterface:
1905    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=1, suffix="invokeInterface"
1906
1907NterpCommonInvokeInterfaceRange:
1908    COMMON_INVOKE_RANGE is_static=0, is_interface=1, suffix="invokeInterface"
1909
1910NterpCommonInvokePolymorphic:
1911    COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=0, is_polymorphic=1, suffix="invokePolymorphic"
1912
1913NterpCommonInvokePolymorphicRange:
1914    COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_polymorphic=1, suffix="invokePolymorphic"
1915
1916NterpCommonInvokeCustom:
1917    COMMON_INVOKE_NON_RANGE is_static=1, is_interface=0, is_string_init=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"
1918
1919NterpCommonInvokeCustomRange:
1920    COMMON_INVOKE_RANGE is_static=1, is_interface=0, is_polymorphic=0, is_custom=1, suffix="invokeCustom"
1921
1922NterpHandleStringInit:
1923   COMMON_INVOKE_NON_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"
1924
1925NterpHandleStringInitRange:
1926   COMMON_INVOKE_RANGE is_static=0, is_interface=0, is_string_init=1, suffix="stringInit"
1927
1928NterpNewInstance:
1929   EXPORT_PC
1930   // Fast-path which gets the class from thread-local cache.
1931%  fetch_from_thread_cache("%rdi", miss_label="2f")
1932   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1933   jne 3f
19344:
1935   callq *rSELF:THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET
19361:
1937   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
1938   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
19392:
1940   movq rSELF:THREAD_SELF_OFFSET, %rdi
1941   movq 0(%rsp), %rsi
1942   movq rPC, %rdx
1943   call nterp_allocate_object
1944   jmp 1b
19453:
1946   // 07 is %rdi
1947   call art_quick_read_barrier_mark_reg07
1948   jmp 4b
1949
1950NterpNewArray:
1951   /* new-array vA, vB, class@CCCC */
1952   EXPORT_PC
1953   // Fast-path which gets the class from thread-local cache.
1954%  fetch_from_thread_cache("%rdi", miss_label="2f")
1955   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
1956   jne 3f
19571:
1958   movzbl  rINSTbl,%esi
1959   sarl    $$4,%esi                          # esi<- B
1960   GET_VREG %esi, %rsi                       # esi<- vB (array length)
1961   andb    $$0xf,rINSTbl                     # rINST<- A
1962   callq *rSELF:THREAD_ALLOC_ARRAY_ENTRYPOINT_OFFSET
1963   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
1964   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
19652:
1966   movq rSELF:THREAD_SELF_OFFSET, %rdi
1967   movq 0(%rsp), %rsi
1968   movq rPC, %rdx
1969   call nterp_get_class
1970   movq %rax, %rdi
1971   jmp 1b
19723:
1973   // 07 is %rdi
1974   call art_quick_read_barrier_mark_reg07
1975   jmp 1b
1976
1977NterpPutObjectInstanceField:
1978   movl    rINST, %ebp                     # rbp <- BA
1979   andl    $$0xf, %ebp                     # rbp <- A
1980   GET_VREG %ecx, %rbp                     # ecx <- v[A]
1981   sarl    $$4, rINST
1982   // Fast-path which gets the field from thread-local cache.
1983%  fetch_from_thread_cache("%rax", miss_label="2f")
19841:
1985   GET_VREG rINST, rINSTq                  # vB (object we're operating on)
1986   testl   rINST, rINST                    # is object null?
1987   je      common_errNullObject
1988   movl %ecx, (rINSTq,%rax,1)
1989   testl %ecx, %ecx
1990   je 4f
1991   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rax
1992   shrq $$CARD_TABLE_CARD_SHIFT, rINSTq
1993   movb %al, (%rax, rINSTq, 1)
19944:
1995   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
19962:
1997   EXPORT_PC
1998   movq rSELF:THREAD_SELF_OFFSET, %rdi
1999   movq 0(%rsp), %rsi
2000   movq rPC, %rdx
2001   // %rcx is already set.
2002   call nterp_get_instance_field_offset
2003   // Reload the value as it may have moved.
2004   GET_VREG %ecx, %rbp                     # ecx <- v[A]
2005   testl %eax, %eax
2006   jns 1b
2007   GET_VREG rINST, rINSTq                  # vB (object we're operating on)
2008   testl   rINST, rINST                    # is object null?
2009   je      common_errNullObject
2010   negl %eax
2011   movl %ecx, (rINSTq,%rax,1)
2012   testl %ecx, %ecx
2013   je 5f
2014   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rax
2015   shrq $$CARD_TABLE_CARD_SHIFT, rINSTq
2016   movb %al, (%rax, rINSTq, 1)
20175:
2018   lock addl $$0, (%rsp)
2019   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
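/*
 * Illustrative sketch (comments only, not assembled): storing a reference into
 * an object must dirty the corresponding card so the GC rescans that region.
 * Rough C-like pseudocode, hypothetical names; the stored byte mirrors the trick
 * above of reusing the low byte of the card-table base as the "dirty" value:
 *
 *   *(uint32_t*)((char*)obj + offset) = value;
 *   if (value != 0) {
 *     uint8_t* card_table = self->card_table;
 *     card_table[(uintptr_t)obj >> CARD_TABLE_CARD_SHIFT] = (uint8_t)(uintptr_t)card_table;
 *   }
 *   // For volatile fields, a full fence ("lock addl $0, (%rsp)") follows the store.
 */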
2020
2021NterpGetObjectInstanceField:
2022   // Fast-path which gets the field from thread-local cache.
2023%  fetch_from_thread_cache("%rax", miss_label="2f")
20241:
2025   movl    rINST, %ecx                     # rcx <- BA
2026   sarl    $$4, %ecx                       # ecx <- B
2027   GET_VREG %ecx, %rcx                     # vB (object we're operating on)
2028   testl   %ecx, %ecx                      # is object null?
2029   je      common_errNullObject
2030   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%ecx)
2031   movl (%rcx,%rax,1), %eax
2032   jnz 3f
20334:
2034   andb    $$0xf,rINSTbl                   # rINST <- A
2035   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
2036   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
20372:
2038   EXPORT_PC
2039   movq rSELF:THREAD_SELF_OFFSET, %rdi
2040   movq 0(%rsp), %rsi
2041   movq rPC, %rdx
2042   movq $$0, %rcx
2043   call nterp_get_instance_field_offset
2044   testl %eax, %eax
2045   jns 1b
2046   // For volatile fields, the runtime returns a negative offset. Remove the sign;
2047   // no memory barrier is needed for this load thanks to the x86 memory model.
2048   negl %eax
2049   jmp 1b
20503:
2051   // reg00 is eax
2052   call art_quick_read_barrier_mark_reg00
2053   jmp 4b
2054
2055NterpPutObjectStaticField:
2056   GET_VREG %ebp, rINSTq
2057   // Fast-path which gets the field from thread-local cache.
2058%  fetch_from_thread_cache("%rax", miss_label="2f")
20591:
2060   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
2061   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
2062   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
2063   jne 3f
20645:
2065   movl %ebp, (%eax, %edx, 1)
2066   testl %ebp, %ebp
2067   je 4f
2068   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rcx
2069   shrq $$CARD_TABLE_CARD_SHIFT, %rax
2070   movb %cl, (%rax, %rcx, 1)
20714:
2072   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
20732:
2074   EXPORT_PC
2075   movq rSELF:THREAD_SELF_OFFSET, %rdi
2076   movq 0(%rsp), %rsi
2077   movq rPC, %rdx
2078   movq %rbp, %rcx
2079   call nterp_get_static_field
2080   // Reload the value as it may have moved.
2081   GET_VREG %ebp, rINSTq
2082   testq MACRO_LITERAL(1), %rax
2083   je 1b
2084   CLEAR_VOLATILE_MARKER %rax
2085   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
2086   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
2087   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
2088   jne 7f
20896:
2090   movl %ebp, (%eax, %edx, 1)
2091   testl %ebp, %ebp
2092   je 8f
2093   movq rSELF:THREAD_CARD_TABLE_OFFSET, %rcx
2094   shrq $$CARD_TABLE_CARD_SHIFT, %rax
2095   movb %cl, (%rax, %rcx, 1)
20968:
2097   lock addl $$0, (%rsp)
2098   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
20993:
2100   call art_quick_read_barrier_mark_reg00
2101   jmp 5b
21027:
2103   call art_quick_read_barrier_mark_reg00
2104   jmp 6b
2105
2106NterpGetObjectStaticField:
2107   // Fast-path which gets the field from thread-local cache.
2108%  fetch_from_thread_cache("%rax", miss_label="2f")
21091:
2110   movl ART_FIELD_OFFSET_OFFSET(%rax), %edx
2111   movl ART_FIELD_DECLARING_CLASS_OFFSET(%rax), %eax
2112   cmpq $$0, rSELF:THREAD_READ_BARRIER_MARK_REG00_OFFSET
2113   jne 5f
21146:
2115   testb $$READ_BARRIER_TEST_VALUE, GRAY_BYTE_OFFSET(%eax)
2116   movl (%eax, %edx, 1), %eax
2117   jnz 3f
21184:
2119   SET_VREG_OBJECT %eax, rINSTq            # fp[A] <- value
2120   ADVANCE_PC_FETCH_AND_GOTO_NEXT 2
21212:
2122   EXPORT_PC
2123   movq rSELF:THREAD_SELF_OFFSET, %rdi
2124   movq 0(%rsp), %rsi
2125   movq rPC, %rdx
2126   movq $$0, %rcx
2127   call nterp_get_static_field
2128   andq $$-2, %rax
2129   jmp 1b
21303:
2131   call art_quick_read_barrier_mark_reg00
2132   jmp 4b
21335:
2134   call art_quick_read_barrier_mark_reg00
2135   jmp 6b
2136
2137NterpGetBooleanStaticField:
2138  OP_SGET load="movsbl", wide=0
2139
2140NterpGetByteStaticField:
2141  OP_SGET load="movsbl", wide=0
2142
2143NterpGetCharStaticField:
2144  OP_SGET load="movzwl", wide=0
2145
2146NterpGetShortStaticField:
2147  OP_SGET load="movswl", wide=0
2148
2149NterpGetWideStaticField:
2150  OP_SGET load="movq", wide=1
2151
2152NterpGetIntStaticField:
2153  OP_SGET load="movl", wide=0
2154
2155NterpPutStaticField:
2156  OP_SPUT rINST_reg=rINST, store="movl", wide=0
2157
2158NterpPutBooleanStaticField:
2159NterpPutByteStaticField:
2160  OP_SPUT rINST_reg=rINSTbl, store="movb", wide=0
2161
2162NterpPutCharStaticField:
2163NterpPutShortStaticField:
2164  OP_SPUT rINST_reg=rINSTw, store="movw", wide=0
2165
2166NterpPutWideStaticField:
2167  OP_SPUT rINST_reg=rINSTq, store="movq", wide=1
2168
2169NterpPutInstanceField:
2170  OP_IPUT rINST_reg=rINST, store="movl", wide=0
2171
2172NterpPutBooleanInstanceField:
2173NterpPutByteInstanceField:
2174  OP_IPUT rINST_reg=rINSTbl, store="movb", wide=0
2175
2176NterpPutCharInstanceField:
2177NterpPutShortInstanceField:
2178  OP_IPUT rINST_reg=rINSTw, store="movw", wide=0
2179
2180NterpPutWideInstanceField:
2181  OP_IPUT rINST_reg=rINSTq, store="movq", wide=1
2182
2183NterpGetBooleanInstanceField:
2184  OP_IGET load="movzbl", wide=0
2185
2186NterpGetByteInstanceField:
2187  OP_IGET load="movsbl", wide=0
2188
2189NterpGetCharInstanceField:
2190  OP_IGET load="movzwl", wide=0
2191
2192NterpGetShortInstanceField:
2193  OP_IGET load="movswl", wide=0
2194
2195NterpGetWideInstanceField:
2196  OP_IGET load="movq", wide=1
2197
2198NterpGetInstanceField:
2199  OP_IGET load="movl", wide=0
2200
2201NterpHandleHotnessOverflow:
2202    CHECK_AND_UPDATE_SHARED_MEMORY_METHOD if_hot=1f, if_not_hot=4f
22031:
2204    movq rPC, %rsi
2205    movq rFP, %rdx
2206    call nterp_hot_method
2207    testq %rax, %rax
2208    jne 3f
22092:
2210    FETCH_INST
2211    GOTO_NEXT
22123:
2213    // Drop the current frame.
2214    movq -8(rREFS), %rsp
2215    CFI_DEF_CFA(rsp, CALLEE_SAVES_SIZE)
2216
2217    // Setup the new frame
2218    movq OSR_DATA_FRAME_SIZE(%rax), %rcx
2219    // The given stack size includes all callee-saved registers; remove them.
2220    subq $$CALLEE_SAVES_SIZE, %rcx
2221
2222    // Remember CFA.
2223    movq %rsp, %rbp
2224    CFI_DEF_CFA_REGISTER(rbp)
2225
2226    subq %rcx, %rsp
2227    movq %rsp, %rdi               // rdi := beginning of stack
2228    leaq OSR_DATA_MEMORY(%rax), %rsi  // rsi := memory to copy
2229    rep movsb                     // while (rcx--) { *rdi++ = *rsi++ }
2230
2231    // Fetch the native PC to jump to and save it in a callee-save register.
2232    movq OSR_DATA_NATIVE_PC(%rax), %rbx
2233
2234    // Free the memory holding OSR Data.
2235    movq %rax, %rdi
2236    call free
2237
2238    // Jump to the compiled code.
2239    jmp *%rbx
22404:
2241    DO_SUSPEND_CHECK continue_label=2b
2242    jmp 2b
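/*
 * Illustrative sketch (comments only, not assembled): on-stack replacement above
 * swaps the nterp frame for a compiled frame. Rough C-like pseudocode,
 * hypothetical names for the OsrData fields:
 *
 *   OsrData* osr = NterpHotMethod(method, dex_pc, vregs);
 *   if (osr == NULL) { fetch_and_dispatch_next_opcode(); }  // stay in nterp
 *   pop_current_nterp_frame();                  // rsp = saved caller stack pointer
 *   size_t size = osr->frame_size - CALLEE_SAVES_SIZE;
 *   void* frame = alloca(size);
 *   memcpy(frame, osr->memory, size);           // the "rep movsb" above
 *   void* native_pc = osr->native_pc;
 *   free(osr);
 *   jump_to(native_pc);                         // continue in compiled code
 */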
2243
2244NterpHandleInvokeInterfaceOnObjectMethodRange:
2245   shrl $$16, %eax
2246   movq MIRROR_CLASS_VTABLE_OFFSET_64(%edx, %eax, 8), %rdi
2247   jmp NterpCommonInvokeInstanceRange
2248
2249NterpHandleInvokeInterfaceOnObjectMethod:
2250   shrl $$16, %eax
2251   movq MIRROR_CLASS_VTABLE_OFFSET_64(%edx, %eax, 8), %rdi
2252   jmp NterpCommonInvokeInstance
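/*
 * Illustrative sketch (comments only, not assembled): an interface call that
 * resolves to a java.lang.Object method (equals, hashCode, ...) is dispatched
 * through the receiver's vtable rather than the IMT. The method-resolution path
 * appears to stash the vtable index in the upper 16 bits of eax, with the
 * receiver's class in edx. Rough C-like pseudocode, hypothetical names:
 *
 *   uint32_t vtable_index = eax >> 16;
 *   ArtMethod* target = receiver_class->vtable[vtable_index];
 *   goto NterpCommonInvokeInstance;   // or the range variant above
 */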
2253
2254// This is the logical end of ExecuteNterpImpl, where the frame info applies.
2255// EndExecuteNterpImpl includes the methods below as we want the runtime to
2256// see them as part of the Nterp PCs.
2257.cfi_endproc
2258
2259nterp_to_nterp_static_non_range:
2260    .cfi_startproc
2261    .cfi_def_cfa rsp, 8
2262    SETUP_STACK_FOR_INVOKE
2263    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=1, is_string_init=0
2264    .cfi_endproc
2265
2266nterp_to_nterp_string_init_non_range:
2267    .cfi_startproc
2268    .cfi_def_cfa rsp, 8
2269    SETUP_STACK_FOR_INVOKE
2270    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=1
2271    .cfi_endproc
2272
2273nterp_to_nterp_instance_non_range:
2274    .cfi_startproc
2275    .cfi_def_cfa rsp, 8
2276    SETUP_STACK_FOR_INVOKE
2277    SETUP_NON_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=0
2278    .cfi_endproc
2279
2280nterp_to_nterp_static_range:
2281    .cfi_startproc
2282    .cfi_def_cfa rsp, 8
2283    SETUP_STACK_FOR_INVOKE
2284    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=1
2285    .cfi_endproc
2286
2287nterp_to_nterp_instance_range:
2288    .cfi_startproc
2289    .cfi_def_cfa rsp, 8
2290    SETUP_STACK_FOR_INVOKE
2291    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0
2292    .cfi_endproc
2293
2294nterp_to_nterp_string_init_range:
2295    .cfi_startproc
2296    .cfi_def_cfa rsp, 8
2297    SETUP_STACK_FOR_INVOKE
2298    SETUP_RANGE_ARGUMENTS_AND_EXECUTE is_static=0, is_string_init=1
2299    .cfi_endproc
2300
2301END nterp_helper
2302
2303// This is the end of PCs contained by the OatQuickMethodHeader created for the interpreter
2304// entry point.
2305    FUNCTION_TYPE(EndExecuteNterpImpl)
2306    ASM_HIDDEN SYMBOL(EndExecuteNterpImpl)
2307    .global SYMBOL(EndExecuteNterpImpl)
2308SYMBOL(EndExecuteNterpImpl):
2309
2310// Entrypoints into runtime.
2311NTERP_TRAMPOLINE nterp_get_static_field, NterpGetStaticField
2312NTERP_TRAMPOLINE nterp_get_instance_field_offset, NterpGetInstanceFieldOffset
2313NTERP_TRAMPOLINE nterp_filled_new_array, NterpFilledNewArray
2314NTERP_TRAMPOLINE nterp_filled_new_array_range, NterpFilledNewArrayRange
2315NTERP_TRAMPOLINE nterp_get_class, NterpGetClass
2316NTERP_TRAMPOLINE nterp_allocate_object, NterpAllocateObject
2317NTERP_TRAMPOLINE nterp_get_method, NterpGetMethod
2318NTERP_TRAMPOLINE nterp_hot_method, NterpHotMethod
2319NTERP_TRAMPOLINE nterp_load_object, NterpLoadObject
2320
2321DEFINE_FUNCTION nterp_deliver_pending_exception
2322    DELIVER_PENDING_EXCEPTION
2323END_FUNCTION nterp_deliver_pending_exception
2324
2325// gen_mterp.py will inline the following definitions
2326// within [ExecuteNterpImpl, EndExecuteNterpImpl).
2327%def instruction_end():
2328
2329    FUNCTION_TYPE(artNterpAsmInstructionEnd)
2330    ASM_HIDDEN SYMBOL(artNterpAsmInstructionEnd)
2331    .global SYMBOL(artNterpAsmInstructionEnd)
2332SYMBOL(artNterpAsmInstructionEnd):
2333    // artNterpAsmInstructionEnd is used as landing pad for exception handling.
2334    FETCH_INST
2335    GOTO_NEXT
2336
2337%def instruction_start():
2338
2339    FUNCTION_TYPE(artNterpAsmInstructionStart)
2340    ASM_HIDDEN SYMBOL(artNterpAsmInstructionStart)
2341    .global SYMBOL(artNterpAsmInstructionStart)
2342SYMBOL(artNterpAsmInstructionStart) = .L_op_nop
2343    .text
2344
2345%def opcode_name_prefix():
2346%   return "nterp_"
2347%def opcode_start():
2348    ENTRY nterp_${opcode}
2349%def opcode_end():
2350    END nterp_${opcode}
2351    // Advance to the end of this handler. Causes an error if we are already past that point.
2352    .org nterp_${opcode} + NTERP_HANDLER_SIZE  // ${opcode} handler is too big!
2353%def opcode_slow_path_start(name):
2354    ENTRY ${name}
2355%def opcode_slow_path_end(name):
2356    END ${name}
2357