• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
%def bincmp(revcmp=""):
/*
 * Generic two-operand compare-and-branch operation.  Provide a "revcmp"
 * fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * The reverse test lets the taken-branch path be the fall-through path:
 * we jump over the branch logic only when the bytecode branch is NOT taken.
 *
 * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
 */
    /* if-cmp vA, vB, +CCCC */
    movzx   rINSTbl, %ecx                   # ecx <- A+
    andb    $$0xf, %cl                      # ecx <- A (low nibble)
    GET_VREG %eax, %ecx                     # eax <- vA
    sarl    $$4, rINST                      # rINST <- B (high nibble)
    cmpl    VREG_ADDRESS(rINST), %eax       # compare (vA, vB)
    j${revcmp}   1f                         # reverse compare true => branch not taken
    movswl  2(rPC), rINST                   # rINST <- signed branch offset CCCC
    testl   rINST, rINST                    # set sign/zero flags for common handler
    jmp     MterpCommonTakenBranch
1:
    cmpw    $$JIT_CHECK_OSR, rPROFILE       # profiling countdown hit OSR check value?
    je      .L_check_not_taken_osr
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2

%def zcmp(revcmp=""):
/*
 * Generic one-operand compare-against-zero-and-branch operation.  Provide a
 * "revcmp" fragment that specifies the *reverse* comparison to perform, e.g.
 * for "if-le" you would use "gt".
 *
 * for: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
 */
    /* if-cmp vAA, +BBBB */
    cmpl    $$0, VREG_ADDRESS(rINST)        # compare (vAA, 0)
    j${revcmp}   1f                         # reverse compare true => branch not taken
    movswl  2(rPC), rINST                   # rINST <- signed displacement BBBB
    testl   rINST, rINST                    # set sign/zero flags for common handler
    jmp     MterpCommonTakenBranch
1:
    cmpw    $$JIT_CHECK_OSR, rPROFILE       # profiling countdown hit OSR check value?
    je      .L_check_not_taken_osr
    ADVANCE_PC_FETCH_AND_GOTO_NEXT 2

%def op_goto():
/*
 * Unconditional branch, 8-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset (the doubling happens in the common handler).
 */
    /* goto +AA */
    movsbl  rINSTbl, rINST                  # rINST <- ssssssAA (sign-extended)
    testl   rINST, rINST                    # set sign/zero flags for common handler
    jmp     MterpCommonTakenBranch

%def op_goto_16():
/*
 * Unconditional branch, 16-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset (the doubling happens in the common handler).
 */
    /* goto/16 +AAAA */
    movswl  2(rPC), rINST                   # rINST <- ssssAAAA (sign-extended)
    testl   rINST, rINST                    # set sign/zero flags for common handler
    jmp     MterpCommonTakenBranch

%def op_goto_32():
/*
 * Unconditional branch, 32-bit offset.
 *
 * The branch distance is a signed code-unit offset, which we need to
 * double to get a byte offset.
 *
 * Unlike most opcodes, this one is allowed to branch to itself, so
 * our "backward branch" test must be "<=0" instead of "<0".
 *
 * NOTE(review): the historical comment here also mentioned using "adds"
 * and the V bit to convert the offset — that is ARM-era mechanics and does
 * not appear in this x86 body; the offset handling (including the <=0
 * self-branch case) is presumably done in MterpCommonTakenBranch — confirm.
 */
    /* goto/32 +AAAAAAAA */
    movl    2(rPC), rINST                   # rINST <- AAAAAAAA (full 32 bits)
    testl   rINST, rINST                    # set sign/zero flags for common handler
    jmp     MterpCommonTakenBranch

%def op_if_eq():
%  bincmp(revcmp="ne")

%def op_if_eqz():
%  zcmp(revcmp="ne")

%def op_if_ge():
%  bincmp(revcmp="l")

%def op_if_gez():
%  zcmp(revcmp="l")

%def op_if_gt():
%  bincmp(revcmp="le")

%def op_if_gtz():
%  zcmp(revcmp="le")

%def op_if_le():
%  bincmp(revcmp="g")

%def op_if_lez():
%  zcmp(revcmp="g")

%def op_if_lt():
%  bincmp(revcmp="ge")

%def op_if_ltz():
%  zcmp(revcmp="ge")

%def op_if_ne():
%  bincmp(revcmp="e")

%def op_if_nez():
%  zcmp(revcmp="e")

%def op_packed_switch(func="MterpDoPackedSwitch"):
/*
 * Handle a packed-switch or sparse-switch instruction.  In both cases
 * we decode it and hand it off to a helper function.
 *
 * We don't really expect backward branches in a switch statement, but
 * they're perfectly legal, so we check for them here (via the common
 * taken-branch handler).
 *
 * for: packed-switch, sparse-switch
 */
    /* op vAA, +BBBB */
    movl    2(rPC), %ecx                    # ecx <- BBBBbbbb (switch-data offset)
    GET_VREG %eax, rINST                    # eax <- vAA (value to switch on)
    leal    (rPC,%ecx,2), %ecx              # ecx <- PC + BBBBbbbb*2 (switch data addr)
    movl    %eax, OUT_ARG1(%esp)            # ARG1 <- vAA
    movl    %ecx, OUT_ARG0(%esp)            # ARG0 <- switchData
    call    SYMBOL($func)                   # eax <- branch offset from helper
    REFRESH_IBASE                           # helper may have invalidated ibase
    testl   %eax, %eax                      # set flags before clobber-free mov
    movl    %eax, rINST                     # rINST <- branch offset (flags preserved)
    jmp     MterpCommonTakenBranch

%def op_return():
/*
 * Return a 32-bit value.
 *
 * Issues the constructor memory fence, runs a pending suspend check if the
 * thread has one requested, then returns vAA in eax (ecx cleared as the
 * unused high half of the 64-bit return pair).
 *
 * for: return, return-object
 */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)
    movl    rSELF, %eax
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
    jz      1f                              # no suspend/checkpoint requested
    movl    %eax, OUT_ARG0(%esp)            # ARG0 <- self
    call    SYMBOL(MterpSuspendCheck)
1:
    GET_VREG %eax, rINST                    # eax <- vAA (return value)
    xorl    %ecx, %ecx                      # clear high word of return pair
    jmp     MterpReturn

%def op_return_object():
%  op_return()

%def op_return_void():
/*
 * Return with no value.  Issues the constructor memory fence, runs a pending
 * suspend check if requested, then returns 0/0 in eax:ecx.
 */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)
    movl    rSELF, %eax
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
    jz      1f                              # no suspend/checkpoint requested
    movl    %eax, OUT_ARG0(%esp)            # ARG0 <- self
    call    SYMBOL(MterpSuspendCheck)
1:
    xorl    %eax, %eax                      # return value = 0
    xorl    %ecx, %ecx                      # high word = 0
    jmp     MterpReturn

%def op_return_void_no_barrier():
/*
 * Return with no value, without the constructor memory fence.  Otherwise
 * identical to op_return_void: suspend check if requested, return 0/0.
 */
    movl    rSELF, %eax
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
    jz      1f                              # no suspend/checkpoint requested
    movl    %eax, OUT_ARG0(%esp)            # ARG0 <- self
    call    SYMBOL(MterpSuspendCheck)
1:
    xorl    %eax, %eax                      # return value = 0
    xorl    %ecx, %ecx                      # high word = 0
    jmp     MterpReturn

%def op_return_wide():
/*
 * Return a 64-bit value in the eax:ecx register pair.  Issues the
 * constructor memory fence and runs a pending suspend check if requested.
 */
    /* return-wide vAA */
    .extern MterpThreadFenceForConstructor
    call    SYMBOL(MterpThreadFenceForConstructor)
    movl    rSELF, %eax
    testl   $$(THREAD_SUSPEND_OR_CHECKPOINT_REQUEST), THREAD_FLAGS_OFFSET(%eax)
    jz      1f                              # no suspend/checkpoint requested
    movl    %eax, OUT_ARG0(%esp)            # ARG0 <- self
    call    SYMBOL(MterpSuspendCheck)
1:
    GET_VREG %eax, rINST                    # eax <- v[AA+0] (low word)
    GET_VREG_HIGH %ecx, rINST               # ecx <- v[AA+1] (high word)
    jmp     MterpReturn

%def op_sparse_switch():
%  op_packed_switch(func="MterpDoSparseSwitch")

%def op_throw():
/*
 * Throw an exception object in the current thread.  A null vAA is routed to
 * the common null-object error path instead of being stored as the pending
 * exception.
 */
    /* throw vAA */
    EXPORT_PC                               # exception handling needs current PC
    GET_VREG %eax, rINST                    # eax <- vAA (exception object)
    testl   %eax, %eax                      # throwing null?
    jz      common_errNullObject
    movl    rSELF,%ecx
    movl    %eax, THREAD_EXCEPTION_OFFSET(%ecx)  # self->exception = obj
    jmp     MterpException
