// ART nterp interpreter (arm64): handlers for object-related opcodes —
// check-cast, instance-of, instance/static field accessors (iget/iput/
// sget/sput variants), and new-instance.
%def op_check_cast():
%  slow_path = add_slow_path(op_check_cast_slow_path)
   // check-cast vA, type@BBBB: verify that the object in vA is compatible
   // with the given class; a null reference always passes.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x1", miss_label="2f")
1:
   lsr     w2, wINST, #8               // w2<- A
   GET_VREG w0, w2                     // w0<- vA (object)
   cbz     w0, .L${opcode}_resume      // null passes the check
   ldr     w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]
   // Fast path: do a comparison without read barrier.
   cmp     w1, w2
   bne     ${slow_path}                // not an exact class match: full check
.L${opcode}_resume:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry at 1b.
   EXPORT_PC
   mov     x0, xSELF
   ldr     x1, [sp]                    // NOTE(review): presumably the caller's ArtMethod* — confirm
   mov     x2, xPC
   bl      nterp_get_class_or_allocate_object
   mov     x1, x0                      // x1<- resolved class
   b       1b

%def op_check_cast_slow_path():
   // Slow path for check-cast.
   // On entry (from the fast path): w0 = object, w1 = checked class,
   // w2 = object's class.
   // We don't do read barriers for simplicity. However, this means that x1
   // (and all other fetched objects) may be a from-space reference. That's OK as
   // we only fetch constant information from the references.
   // This also means that some of the comparisons below may lead to false negative,
   // but it will eventually be handled in the runtime.
   ldr     w3, [x1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tbnz    w3, #MIRROR_CLASS_IS_INTERFACE_FLAG_BIT, 2f  // interfaces: defer to runtime
   ldr     w3, [x1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   cbnz    w3, 5f                      // checked class is an array: handle at 5f
1:
   // Walk the object's superclass chain looking for the checked class.
   ldr     w2, [x2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     w1, w2
   beq     .L${opcode}_resume
   cbnz    w2, 1b
2:
   // Fall back to the runtime check (which throws on failure).
   TEST_IF_MARKING 4f
3:
   EXPORT_PC
   bl      art_quick_check_instance_of
   b       .L${opcode}_resume
4:
   bl      art_quick_read_barrier_mark_reg01   // mark the class in x1 first
   b       3b
5:
   // Class in w1 is an array, w3 is the component type.
   ldr     w2, [x2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   // Check if object is an array.
   cbz     w2, 2b
   ldr     w4, [x3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   // If the super class of the component type is not null, go slow path.
   cbnz    w4, 2b
   ldrh    w3, [x3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // If the component type is primitive, go slow path.
   cbnz    w3, 2b
   // Check if the object is a primitive array.
   ldrh    w2, [x2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   cbz     w2, .L${opcode}_resume
   // Go slow path for throwing the exception.
   b       2b

%def op_instance_of():
%  slow_path = add_slow_path(op_instance_of_slow_path)
   /* instance-of vA, vB, class@CCCC */
   // Sets vA to 1 if vB is a non-null instance of the given class, 0 otherwise.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x1", miss_label="2f")
1:
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w0, w2                     // w0<- vB (object)
   cbz     w0, .L${opcode}_resume      // null object: w0 is already 0 (false)
   ldr     w2, [x0, #MIRROR_OBJECT_CLASS_OFFSET]
   // Fast path: do a comparison without read barrier.
   cmp     w1, w2
   bne     ${slow_path}
.L${opcode}_set_one:
   mov     w0, #1
.L${opcode}_resume:
   ubfx    w1, wINST, #8, #4           // w1<- A
   SET_VREG w0, w1                     // vA<- result
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry at 1b.
   EXPORT_PC
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   bl      nterp_get_class_or_allocate_object
   mov     x1, x0                      // x1<- resolved class
   b       1b

%def op_instance_of_slow_path():
   // Slow path for instance-of.
   // On entry (from the fast path): w0 = object, w1 = checked class,
   // w2 = object's class.
   // Go slow path if we are marking. Checking now allows
   // not going to slow path if the super class hierarchy check fails.
   TEST_IF_MARKING 4f
   ldr     w3, [x1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tbnz    w3, #MIRROR_CLASS_IS_INTERFACE_FLAG_BIT, 5f  // interfaces: defer to runtime
   ldr     w3, [x1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   cbnz    w3, 3f                      // checked class is an array: handle at 3f
1:
   // Walk the object's superclass chain looking for the checked class.
   ldr     w2, [x2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     w1, w2
   beq     .L${opcode}_set_one
   cbnz    w2, 1b
2:
   // Not an instance: result is 0.
   mov     w0, #0
   b       .L${opcode}_resume
3:
   // Class in x1 is an array, x3 is the component type of x1, and x2 is the class of the object.
   ldr     w2, [x2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   // Check if object is an array.
   cbz     w2, 2b
   // Check if x1 is Object[].
   ldr     w4, [x3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   // If the super class is not Object, go to slow path.
   cbnz    w4, 5f
   // Super class is null, this could either be a primitive array or Object[].
   ldrh    w3, [x3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // If x1 is a primitive array class, we know the check is false.
   cbnz    w3, 2b
   // Check if x2 is a primitive array class.
   ldrh    w2, [x2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   cmp     w2, #0
   cset    w0, eq                      // result<- 1 iff object's component type is non-primitive
   b       .L${opcode}_resume
4:
   bl      art_quick_read_barrier_mark_reg01   // mark the checked class in x1
5:
   EXPORT_PC
   bl      artInstanceOfFromCode
   b       .L${opcode}_resume

%def op_iget_boolean():
   // iget-boolean: zero-extending byte load.
%  op_iget(load="ldrb", volatile_load="ldarb", maybe_extend="", wide="0", is_object="0")

%def op_iget_byte():
   // iget-byte: sign-extending byte load (ldarb zero-extends, hence the sxtb fixup).
%  op_iget(load="ldrsb", volatile_load="ldarb", maybe_extend="sxtb w0, w0", wide="0", is_object="0")

%def op_iget_char():
   // iget-char: zero-extending halfword load.
%  op_iget(load="ldrh", volatile_load="ldarh", maybe_extend="", wide="0", is_object="0")

%def op_iget_short():
   // iget-short: sign-extending halfword load (ldarh zero-extends, hence the sxth fixup).
%  op_iget(load="ldrsh", volatile_load="ldarh", maybe_extend="sxth w0, w0", wide="0", is_object="0")

%def op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iget_slow_path, volatile_load, maybe_extend, wide, is_object)
   // Generic instance-field getter. Parameters select the plain load, the
   // acquire load for volatile fields (used by the slow path), an optional
   // sign-extension fixup, and whether the field is wide (64-bit) or an object.
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w3, w2                     // w3<- object we're operating on
   ubfx    w2, wINST, #8, #4           // w2<- A
   cbz     w3, common_errNullObject    // object was null
   .if $wide
   $load   x0, [x3, x0]                // x0 held the field offset
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $load   w0, [x3, x0]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $load   w0, [x3, x0]
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the loaded reference in w0
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_iget_slow_path(volatile_load, maybe_extend, wide, is_object):
   // Slow path: resolve the field offset through the runtime. Bit 31 of the
   // returned value marks a volatile field, which takes an acquire load.
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   mov     x3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   tbz     w0, #31, .L${opcode}_resume // not volatile: reuse the fast-path sequence
   CLEAR_INSTANCE_VOLATILE_MARKER w0
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w3, w2                     // w3<- object we're operating on
   ubfx    w2, wINST, #8, #4           // w2<- A
   cbz     w3, common_errNullObject    // object was null
   add     x3, x3, x0                  // x3<- field address
   .if $wide
   $volatile_load x0, [x3]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $volatile_load w0, [x3]
   TEST_IF_MARKING .L${opcode}_read_barrier
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $volatile_load w0, [x3]
   $maybe_extend
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iget_wide():
   // iget-wide: 64-bit field load.
%  op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="1", is_object="0")

%def op_iget_object():
   // iget-object: reference load with read-barrier handling.
%  op_iget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="1")

%def op_iput_boolean():
   // iput-boolean: byte store.
%  op_iput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_iput_byte():
   // iput-byte: byte store.
%  op_iput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_iput_char():
   // iput-char: halfword store.
%  op_iput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_iput_short():
   // iput-short: halfword store.
%  op_iput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_iput(store="str", volatile_store="stlr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iput_slow_path, volatile_store, wide, is_object)
   // Generic instance-field setter. The value to store is read into x26/w26
   // up front so the slow path can hand it to the runtime (for object fields).
   ubfx    w1, wINST, #8, #4           // w1<- A
   .if $wide
   GET_VREG_WIDE x26, w1               // x26<- fp[A]/fp[A+1]
   .else
   GET_VREG w26, w1                    // w26 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w2, w2                     // vB (object we're operating on)
   cbz     w2, common_errNullObject
   .if $wide
   $store  x26, [x2, x0]               // x0 held the field offset
   .else
   $store  w26, [x2, x0]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w2, .L${opcode}_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_slow_path(volatile_store, wide, is_object):
   // Slow path: resolve the field offset through the runtime. Bit 31 of the
   // returned value marks a volatile field, which takes a release store.
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   .if $is_object
   mov     x3, x26                     // pass the reference value to the runtime
   .else
   mov     x3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    w1, wINST, #8, #4           // w1<- A
   GET_VREG w26, w1                    // w26 <- v[A]
   .endif
   tbz     w0, #31, .L${opcode}_resume // not volatile: reuse the fast-path sequence
   CLEAR_INSTANCE_VOLATILE_MARKER w0
   lsr     w2, wINST, #12              // w2<- B
   GET_VREG w2, w2                     // vB (object we're operating on)
   cbz     w2, common_errNullObject
   add     x3, x2, x0                  // x3<- field address
   .if $wide
   $volatile_store x26, [x3]
   .else
   $volatile_store w26, [x3]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w2, .L${opcode}_slow_path_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_wide():
   // iput-wide: 64-bit field store.
%  op_iput(store="str", volatile_store="stlr", wide="1", is_object="0")

%def op_iput_object():
   // iput-object: reference store with write-barrier handling.
%  op_iput(store="str", volatile_store="stlr", wide="0", is_object="1")

%def op_sget_boolean():
   // sget-boolean: zero-extending byte load.
%  op_sget(load="ldrb", volatile_load="ldarb", maybe_extend="", wide="0", is_object="0")

%def op_sget_byte():
   // sget-byte: sign-extending byte load (ldarb zero-extends, hence the sxtb fixup).
%  op_sget(load="ldrsb", volatile_load="ldarb", maybe_extend="sxtb w0, w0", wide="0", is_object="0")

%def op_sget_char():
   // sget-char: zero-extending halfword load.
%  op_sget(load="ldrh", volatile_load="ldarh", maybe_extend="", wide="0", is_object="0")

%def op_sget_short():
   // sget-short: sign-extending halfword load (ldarh zero-extends, hence the sxth fixup).
%  op_sget(load="ldrsh", volatile_load="ldarh", maybe_extend="sxth w0, w0", wide="0", is_object="0")

%def op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sget_slow_path, volatile_load, maybe_extend, wide, is_object)
   // Generic static-field getter. The cache yields an ArtField*; we read the
   // field offset and the declaring class (the storage base) from it.
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]          // w1<- field offset
   lsr     w2, wINST, #8               // w2 <- A
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET] // w0<- declaring class
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   ldr     x0, [x0, x1]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $load   w0, [x0, x1]
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $load   w0, [x0, x1]
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00           // mark the declaring class
   .if $is_object
   $load   w0, [x0, x1]
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   bl      art_quick_read_barrier_mark_reg00           // mark the loaded reference
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_sget_slow_path(volatile_load, maybe_extend, wide, is_object):
   // Slow path: resolve the field through the runtime. Bit 0 of the returned
   // ArtField* marks a volatile field, which takes an acquire load.
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   mov     x3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tbz     x0, #0, .L${opcode}_resume  // not volatile: reuse the fast-path sequence
   CLEAR_STATIC_VOLATILE_MARKER x0
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]          // w1<- field offset
   lsr     w2, wINST, #8               // w2 <- A
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET] // w0<- declaring class
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   add     x0, x0, x1                  // x0<- field address
   .if $wide
   ldar    x0, [x0]
   SET_VREG_WIDE x0, w2                // fp[A] <- value
   .elseif $is_object
   $volatile_load w0, [x0]
   TEST_IF_MARKING .L${opcode}_mark_after_load
   SET_VREG_OBJECT w0, w2              // fp[A] <- value
   .else
   $volatile_load w0, [x0]
   $maybe_extend
   SET_VREG w0, w2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sget_wide():
   // sget-wide: 64-bit field load.
%  op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="1", is_object="0")

%def op_sget_object():
   // sget-object: reference load with read-barrier handling.
%  op_sget(load="ldr", volatile_load="ldar", maybe_extend="", wide="0", is_object="1")

%def op_sput_boolean():
   // sput-boolean: byte store.
%  op_sput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_sput_byte():
   // sput-byte: byte store.
%  op_sput(store="strb", volatile_store="stlrb", wide="0", is_object="0")

%def op_sput_char():
   // sput-char: halfword store.
%  op_sput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_sput_short():
   // sput-short: halfword store.
%  op_sput(store="strh", volatile_store="stlrh", wide="0", is_object="0")

%def op_sput(store="str", volatile_store="stlr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sput_slow_path, volatile_store, wide, is_object)
   // Generic static-field setter. The value to store is read into x26/w26 up
   // front so the slow path can hand it to the runtime (for object fields).
   lsr     w2, wINST, #8               // w2 <- A
   .if $wide
   GET_VREG_WIDE x26, w2               // x26 <- v[A]
   .else
   GET_VREG w26, w2                    // w26 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label=slow_path)
.L${opcode}_resume:
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]          // w1<- field offset
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET] // w0<- declaring class
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   $store  x26, [x0, x1]
   .else
   $store  w26, [x0, x1]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w0, .L${opcode}_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   bl      art_quick_read_barrier_mark_reg00           // mark the declaring class
   b       .L${opcode}_resume_after_read_barrier

%def op_sput_slow_path(volatile_store, wide, is_object):
   // Slow path: resolve the field through the runtime. Bit 0 of the returned
   // ArtField* marks a volatile field, which takes a release store.
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   .if $is_object
   mov     x3, x26                     // pass the reference value to the runtime
   .else
   mov     x3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     w2, wINST, #8               // w2 <- A
   GET_VREG w26, w2                    // w26 <- v[A]
   .endif
   tbz     x0, #0, .L${opcode}_resume  // not volatile: reuse the fast-path sequence
   CLEAR_STATIC_VOLATILE_MARKER x0
   ldr     w1, [x0, #ART_FIELD_OFFSET_OFFSET]          // w1<- field offset
   ldr     w0, [x0, #ART_FIELD_DECLARING_CLASS_OFFSET] // w0<- declaring class
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   add     x1, x0, x1                  // x1<- field address
   .if $wide
   $volatile_store    x26, [x1]
   .else
   $volatile_store    w26, [x1]
   WRITE_BARRIER_IF_OBJECT $is_object, w26, w0, .L${opcode}_slow_path_skip_write_barrier
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   bl      art_quick_read_barrier_mark_reg00   // mark the declaring class
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sput_wide():
   // sput-wide: 64-bit field store.
%  op_sput(store="str", volatile_store="stlr", wide="1", is_object="0")

%def op_sput_object():
   // sput-object: reference store with write-barrier handling.
%  op_sput(store="str", volatile_store="stlr", wide="0", is_object="1")

%def op_new_instance():
   // new-instance vA, type@BBBB: allocate an object of the resolved class
   // and store the reference in vA.
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("x0", miss_label="2f")
   TEST_IF_MARKING 3f
4:
   ldr     lr, [xSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blr     lr                          // allocate; x0<- new object
1:
   lsr     w1, wINST, #8               // w1 <- A
   SET_VREG_OBJECT w0, w1              // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: the runtime helper returns the object itself (note that the
   // branch to 1b skips the allocation sequence above).
   mov     x0, xSELF
   ldr     x1, [sp]
   mov     x2, xPC
   bl      nterp_get_class_or_allocate_object
   b       1b
3:
   bl      art_quick_read_barrier_mark_reg00   // mark the class before allocating
   b       4b
