• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
%def bincmp(condition=""):
    /*
     * Generic two-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform, e.g. for
     * "beq a0, a3, ..." you would use "eq".
     *
     * For: if-eq, if-ne, if-lt, if-ge, if-gt, if-le
     */
    /* if-cmp vA, vB, +CCCC */
    GET_OPA4(a0)                           #  a0 <- A+
    GET_OPB(a1)                            #  a1 <- B
    GET_VREG(a3, a1)                       #  a3 <- vB
    GET_VREG(a0, a0)                       #  a0 <- vA
    FETCH_S(rINST, 1)                      #  rINST <- branch offset, in code units
    b${condition} a0, a3, MterpCommonTakenBranchNoFlags  #  compare (vA, vB)
    li        t0, JIT_CHECK_OSR            #  possible OSR re-entry?
    beq       rPROFILE, t0, .L_check_not_taken_osr
    FETCH_ADVANCE_INST(2)                  #  advance rPC, load rINST
    GET_INST_OPCODE(t0)                    #  extract opcode from rINST
    GOTO_OPCODE(t0)                        #  jump to next instruction

%def zcmp(condition=""):
    /*
     * Generic one-operand compare-and-branch operation.  Provide a "condition"
     * fragment that specifies the comparison to perform, e.g. for
     * "beq a0, zero, ..." you would use "eq".
     *
     * For: if-eqz, if-nez, if-ltz, if-gez, if-gtz, if-lez
     */
    /* if-cmp vAA, +BBBB */
    GET_OPA(a0)                            #  a0 <- AA
    GET_VREG(a0, a0)                       #  a0 <- vAA
    FETCH_S(rINST, 1)                      #  rINST <- branch offset, in code units
    b${condition} a0, zero, MterpCommonTakenBranchNoFlags  #  compare (vAA, 0)
    li        t0, JIT_CHECK_OSR            #  possible OSR re-entry?
    beq       rPROFILE, t0, .L_check_not_taken_osr
    FETCH_ADVANCE_INST(2)                  #  advance rPC, load rINST
    GET_INST_OPCODE(t0)                    #  extract opcode from rINST
    GOTO_OPCODE(t0)                        #  jump to next instruction

%def op_goto():
    /*
     * Unconditional branch, 8-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     */
    /* goto +AA */
    sll       a0, rINST, 16                #  a0 <- AAxx0000
    sra       rINST, a0, 24                #  rINST <- ssssssAA (sign-extended)
    b         MterpCommonTakenBranchNoFlags

%def op_goto_16():
    /*
     * Unconditional branch, 16-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     */
    /* goto/16 +AAAA */
    FETCH_S(rINST, 1)                      #  rINST <- ssssAAAA (sign-extended)
    b         MterpCommonTakenBranchNoFlags

%def op_goto_32():
    /*
     * Unconditional branch, 32-bit offset.
     *
     * The branch distance is a signed code-unit offset, which we need to
     * double to get a byte offset.
     *
     * Unlike most opcodes, this one is allowed to branch to itself, so
     * our "backward branch" test must be "<=0" instead of "<0".
     */
    /* goto/32 +AAAAAAAA */
    FETCH(rINST, 1)                        #  rINST <- aaaa (lo)
    FETCH(a1, 2)                           #  a1 <- AAAA (hi)
    INSERT_HIGH_HALF(rINST, a1)            #  rINST <- AAAAaaaa
    b         MterpCommonTakenBranchNoFlags

%def op_if_eq():
%  bincmp(condition="eq")

%def op_if_eqz():
%  zcmp(condition="eq")

%def op_if_ge():
%  bincmp(condition="ge")

%def op_if_gez():
%  zcmp(condition="ge")

%def op_if_gt():
%  bincmp(condition="gt")

%def op_if_gtz():
%  zcmp(condition="gt")

%def op_if_le():
%  bincmp(condition="le")

%def op_if_lez():
%  zcmp(condition="le")

%def op_if_lt():
%  bincmp(condition="lt")

%def op_if_ltz():
%  zcmp(condition="lt")

%def op_if_ne():
%  bincmp(condition="ne")

%def op_if_nez():
%  zcmp(condition="ne")

%def op_packed_switch(func="MterpDoPackedSwitch"):
    /*
     * Handle a packed-switch or sparse-switch instruction.  In both cases
     * we decode it and hand it off to a helper function ($func), which
     * returns the code-unit branch offset in v0.
     *
     * We don't really expect backward branches in a switch statement, but
     * they're perfectly legal, so we check for them here.
     *
     * For: packed-switch, sparse-switch
     */
    /* op vAA, +BBBB */
    FETCH(a0, 1)                           #  a0 <- bbbb (lo)
    FETCH(a1, 2)                           #  a1 <- BBBB (hi)
    GET_OPA(a3)                            #  a3 <- AA
    INSERT_HIGH_HALF(a0, a1)               #  a0 <- BBBBbbbb
    GET_VREG(a1, a3)                       #  a1 <- vAA
    EAS1(a0, rPC, a0)                      #  a0 <- PC + BBBBbbbb*2 (switch data)
    JAL($func)                             #  v0 <- code-unit branch offset
    move      rINST, v0
    b         MterpCommonTakenBranchNoFlags

%def op_return():
    /*
     * Return a 32-bit value.  Issues the constructor memory fence, honors
     * any pending suspend/checkpoint request, then hands vAA to MterpReturn
     * in v0 (v1 zeroed).
     *
     * For: return, return-object
     */
    /* op vAA */
    .extern MterpThreadFenceForConstructor
    JAL(MterpThreadFenceForConstructor)
    lw        ra, THREAD_FLAGS_OFFSET(rSELF)
    move      a0, rSELF
    and       ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqz      ra, 1f
    JAL(MterpSuspendCheck)                 #  (self)
1:
    GET_OPA(a2)                            #  a2 <- AA
    GET_VREG(v0, a2)                       #  v0 <- vAA
    move      v1, zero
    b         MterpReturn

%def op_return_object():
%  op_return()

%def op_return_void():
    /*
     * Return void.  Issues the constructor memory fence and honors any
     * pending suspend/checkpoint request; v0/v1 are zeroed for MterpReturn.
     */
    .extern MterpThreadFenceForConstructor
    JAL(MterpThreadFenceForConstructor)
    lw        ra, THREAD_FLAGS_OFFSET(rSELF)
    move      a0, rSELF
    and       ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqz      ra, 1f
    JAL(MterpSuspendCheck)                 #  (self)
1:
    move      v0, zero
    move      v1, zero
    b         MterpReturn

%def op_return_void_no_barrier():
    /*
     * Return void without the constructor memory fence.  Still honors any
     * pending suspend/checkpoint request; v0/v1 are zeroed for MterpReturn.
     */
    lw     ra, THREAD_FLAGS_OFFSET(rSELF)
    move   a0, rSELF
    and    ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqz   ra, 1f
    JAL(MterpSuspendCheck)                 #  (self)
1:
    move   v0, zero
    move   v1, zero
    b      MterpReturn

%def op_return_wide():
    /*
     * Return a 64-bit value.  Issues the constructor memory fence, honors
     * any pending suspend/checkpoint request, then loads vAA/vAA+1 into
     * v0/v1 for MterpReturn.
     */
    /* return-wide vAA */
    .extern MterpThreadFenceForConstructor
    JAL(MterpThreadFenceForConstructor)
    lw        ra, THREAD_FLAGS_OFFSET(rSELF)
    move      a0, rSELF
    and       ra, THREAD_SUSPEND_OR_CHECKPOINT_REQUEST
    beqz      ra, 1f
    JAL(MterpSuspendCheck)                 #  (self)
1:
    GET_OPA(a2)                            #  a2 <- AA
    EAS2(a2, rFP, a2)                      #  a2 <- &fp[AA]
    LOAD64(v0, v1, a2)                     #  v0/v1 <- vAA/vAA+1
    b         MterpReturn

%def op_sparse_switch():
%  op_packed_switch(func="MterpDoSparseSwitch")

%def op_throw():
    /*
     * Throw an exception object in the current thread.  A null vAA is
     * redirected to common_errNullObject (throws NPE instead).
     */
    /* throw vAA */
    EXPORT_PC()                              #  exception handler can throw
    GET_OPA(a2)                              #  a2 <- AA
    GET_VREG(a1, a2)                         #  a1 <- vAA (exception object)
    # null object?
    beqz  a1, common_errNullObject           #  yes, throw an NPE instead
    sw    a1, THREAD_EXCEPTION_OFFSET(rSELF) #  thread->exception <- obj
    b         MterpException
