• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
%def op_check_cast():
%  slow_path = add_slow_path(op_check_cast_slow_path)
   // check-cast vA, type@BBBB: throws if vA is not an instance of the class.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r1", miss_label="2f")
1:
   lsr     r2, rINST, #8               // r2<- A
   GET_VREG r0, r2                     // r0<- vA (object)
   cmp     r0, #0
   beq     .L${opcode}_resume          // null passes the check (no throw)
   ldr     r2, [r0, #MIRROR_OBJECT_CLASS_OFFSET]
   // Fast path: do a comparison without read barrier.
   cmp     r1, r2
   bne     ${slow_path}
.L${opcode}_resume:
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry.
   EXPORT_PC
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_get_class
   mov     r1, r0                      // r1<- resolved class
   b       1b

%def op_check_cast_slow_path():
   // Slow path for check-cast. On entry (branched from the fast path):
   // r0 = object (non-null), r1 = target class, r2 = object's class.
   ldr     r3, [r1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tst     r3, #MIRROR_CLASS_IS_INTERFACE_FLAG
   bne     2f                          // interface target: defer to the runtime
   ldr     r3, [r1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   cmp     r3, #0
   bne     5f                          // target is an array class
1:
   // Walk the object's super-class chain looking for the target class.
   ldr     r2, [r2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     r1, r2
   beq     .L${opcode}_resume
   cmp     r2, #0
   bne     1b
2:
   TEST_IF_MARKING 4f
3:
   // Let the runtime do the full check (and throw on failure).
   EXPORT_PC
   bl      art_quick_check_instance_of
   b       .L${opcode}_resume
4:
   // Marking in progress: mark the class reference in r1 first.
   bl      art_quick_read_barrier_mark_reg01
   b       3b
5:
   // Class in r1 is an array, r3 is the component type.
   ldr     r2, [r2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   // Check if object is an array.
   cmp     r2, #0
   beq     2b
   ldr     r4, [r3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     r4, #0
   // If the super class of the component type is not null, go slow path.
   bne     2b
   ldrh    r3, [r3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // Check if the object is a primitive array.
   ldrh    r2, [r2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   orrs    r2, r3                      // both primitive types zero => cast succeeds
   beq     .L${opcode}_resume
   // Go slow path for throwing the exception.
   b 2b

%def op_instance_of():
%  slow_path = add_slow_path(op_instance_of_slow_path)
   /* instance-of vA, vB, class@CCCC */
   // Sets vA to 1 if vB is an instance of the class, 0 otherwise.
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r1", miss_label="2f")
1:
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r0, r2                     // r0<- vB (object)
   cmp     r0, #0
   beq     .L${opcode}_resume          // null object: r0 is already 0 (false)
   ldr     r2, [r0, #MIRROR_OBJECT_CLASS_OFFSET]
   // Fast path: do a comparison without read barrier.
   cmp     r1, r2
   bne     ${slow_path}
.L${opcode}_set_one:
   mov     r0, #1
.L${opcode}_resume:
   ubfx    r1, rINST, #8, #4           // r1<- A
   SET_VREG r0, r1                     // vA <- result (0 or 1)
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class through the runtime, then retry.
   EXPORT_PC
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_get_class
   mov     r1, r0                      // r1<- resolved class
   b       1b

%def op_instance_of_slow_path():
   // Slow path for instance-of. On entry (branched from the fast path):
   // r0 = object (non-null), r1 = target class, r2 = object's class.
   // Go slow path if we are marking. Checking now allows
   // not going to slow path if the super class hierarchy check fails.
   TEST_IF_MARKING 4f
   ldr     r3, [r1, #MIRROR_CLASS_ACCESS_FLAGS_OFFSET]
   tst     r3, #MIRROR_CLASS_IS_INTERFACE_FLAG
   bne     5f                          // interface target: defer to the runtime
   ldr     r3, [r1, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   cmp     r3, #0
   bne     3f                          // target is an array class
1:
   // Walk the object's super-class chain looking for the target class.
   ldr     r2, [r2, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     r1, r2
   beq     .L${opcode}_set_one
   cmp     r2, #0
   bne     1b
2:
   mov     r0, #0                      // not an instance
   b       .L${opcode}_resume
3:
   // Class in r1 is an array, r3 is the component type.
   ldr     r2, [r2, #MIRROR_CLASS_COMPONENT_TYPE_OFFSET]
   // Check if object is an array.
   cmp     r2, #0
   beq     2b
   ldr     r4, [r3, #MIRROR_CLASS_SUPER_CLASS_OFFSET]
   cmp     r4, #0
   bne     5f                          // component type has a super class: runtime call
   ldrh    r3, [r3, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   // Check if the object is a primitive array.
   ldrh    r2, [r2, #MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET]
   orr     r0, r2, r3
   clz     r0, r0                      // clz(0) == 32, clz(nonzero) < 32
   lsrs    r0, r0, #5                  // r0 = 1 iff both primitive types were zero
   b       .L${opcode}_resume
4:
   // Marking: mark the class in r1, then fall through to the runtime call.
   bl      art_quick_read_barrier_mark_reg01
5:
   EXPORT_PC
   bl      artInstanceOfFromCode
   b       .L${opcode}_resume

%def op_iget_boolean():
   // iget-boolean: 8-bit zero-extending load.
%  op_iget(load="ldrb", wide="0", is_object="0")

%def op_iget_byte():
   // iget-byte: 8-bit sign-extending load.
%  op_iget(load="ldrsb", wide="0", is_object="0")

%def op_iget_char():
   // iget-char: 16-bit zero-extending load.
%  op_iget(load="ldrh", wide="0", is_object="0")

%def op_iget_short():
   // iget-short: 16-bit sign-extending load.
%  op_iget(load="ldrsh", wide="0", is_object="0")

%def op_iget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iget_slow_path, load, wide, is_object)
   // iget vA, vB, field@CCCC: loads an instance field of vB into vA.
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   // Here r0 holds the field offset.
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject        // object was null
   .if $wide
   add     r3, r3, r0                  // r3<- field address
   ldrd    r0, r1, [r3]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r3, r0]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
   .if $is_object
.L${opcode}_read_barrier:
   // Mark the loaded reference (in r0) before storing it to the vreg.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_iget_slow_path(load, wide, is_object):
   // Slow path: resolve the field through the runtime. A non-negative result
   // is a plain offset (retry the fast path); a negative result appears to
   // mark a volatile field (see CLEAR_INSTANCE_VOLATILE_MARKER below).
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   cmp     r0, #0
   bge     .L${opcode}_resume          // non-volatile: take the fast path
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   lsr     r2, rINST, #12              // r2<- B
   GET_VREG r3, r2                     // r3<- object we're operating on
   ubfx    r2, rINST, #8, #4           // r2<- A
   cmp     r3, #0
   beq     common_errNullObject            // object was null
   .if $wide
   add     ip, r3, r0                  // ip<- field address
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // barrier after the volatile load
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r3, r0]
   dmb     ish                         // barrier after the volatile load
   .if $is_object
   TEST_IF_MARKING .L${opcode}_read_barrier
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iget_wide():
   // iget-wide: 64-bit load into a register pair.
%  op_iget(load="ldr", wide="1", is_object="0")

%def op_iget_object():
   // iget-object: reference load with read-barrier handling.
%  op_iget(load="ldr", wide="0", is_object="1")

%def op_iput_boolean():
   // iput-boolean: 8-bit store (stores need no sign/zero extension).
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_byte():
   // iput-byte: 8-bit store.
%  op_iput(store="strb", wide="0", is_object="0")

%def op_iput_char():
   // iput-char: 16-bit store.
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput_short():
   // iput-short: 16-bit store.
%  op_iput(store="strh", wide="0", is_object="0")

%def op_iput(store="str", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_iput_slow_path, store, wide, is_object)
   // iput vA, vB, field@CCCC: stores vA into an instance field of vB.
   .if !$wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   // Here r0 holds the field offset and (if !wide) r4 holds the value.
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   .if $wide
   ubfx    r4, rINST, #8, #4           // r4<- A
   VREG_INDEX_TO_ADDR r4, r4
   GET_VREG_WIDE_BY_ADDR r2, r3, r4      // r2/r3 <- fp[A]
   add     r1, r1, r0                  // r1<- field address
   strd    r2, r3, [r1]
   .else
   $store  r4, [r1, r0]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_slow_path(store, wide, is_object):
   // Slow path: resolve the field through the runtime. A non-negative result
   // retries the fast path; a negative result appears to mark a volatile
   // field (see CLEAR_INSTANCE_VOLATILE_MARKER below).
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // pass the reference value; reloaded below
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_instance_field_offset
   .if $is_object
   // Reload the value as it may have moved.
   ubfx    r4, rINST, #8, #4           // r4<- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   cmp     r0, #0
   bge     .L${opcode}_resume          // non-volatile: take the fast path
   CLEAR_INSTANCE_VOLATILE_MARKER r0
   .if $wide
   lsr     r4, rINST, #12              // r4<- B
   ubfx    r1, rINST, #8, #4           // r1<- A
   GET_VREG r4, r4                     // vB (object we're operating on)
   cmp     r4, #0
   beq     common_errNullObject
   VREG_INDEX_TO_ADDR r1, r1
   GET_VREG_WIDE_BY_ADDR r2, r3, r1    // r2/r3 <- fp[A]
   add     ip, r4, r0                  // ip<- field address
   dmb     ish                         // barrier before the volatile store
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish                         // barrier after the volatile store
   .else
   lsr     r1, rINST, #12              // r1<- B
   GET_VREG r1, r1                     // vB (object we're operating on)
   cmp     r1, #0
   beq     common_errNullObject
   dmb     ish                         // barrier before the volatile store
   $store  r4, [r1, r0]
   dmb     ish                         // barrier after the volatile store
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r1, .L${opcode}_slow_path_skip_write_barrier, r0
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip

%def op_iput_wide():
   // iput-wide: 64-bit store from a register pair.
%  op_iput(store="str", wide="1", is_object="0")

%def op_iput_object():
   // iput-object: reference store with GC write barrier.
%  op_iput(store="str", wide="0", is_object="1")

%def op_sget_boolean():
   // sget-boolean: 8-bit zero-extending load.
%  op_sget(load="ldrb", wide="0", is_object="0")

%def op_sget_byte():
   // sget-byte: 8-bit sign-extending load.
%  op_sget(load="ldrsb", wide="0", is_object="0")

%def op_sget_char():
   // sget-char: 16-bit zero-extending load.
%  op_sget(load="ldrh", wide="0", is_object="0")

%def op_sget_short():
   // sget-short: 16-bit sign-extending load.
%  op_sget(load="ldrsh", wide="0", is_object="0")

%def op_sget(load="ldr", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sget_slow_path, load, wide, is_object)
   // sget vAA, field@BBBB: loads a static field into vAA.
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   // Here r0 holds the ArtField pointer.
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   add     r0, r0, r1                  // r0<- field address
   ldrd    r0, r1, [r0]
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .elseif $is_object
   $load   r0, [r0, r1]
   // No need to check the marking register, we know it's not set here.
.L${opcode}_after_reference_load:
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // Mark the declaring class (in r0) before reading from it.
   bl      art_quick_read_barrier_mark_reg00
   .if $is_object
   ldr     r0, [r0, r1]                // load the reference field
.L${opcode}_mark_after_load:
   // Here, we know the marking register is set.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_after_reference_load
   .else
   b       .L${opcode}_resume_after_read_barrier
   .endif

%def op_sget_slow_path(load="ldr", wide="0", is_object="0"):
   // Slow path: resolve the static field through the runtime. Bit 0 of the
   // returned pointer appears to mark a volatile field (see the tst below
   // and CLEAR_STATIC_VOLATILE_MARKER); clear results retry the fast path.
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   mov     r3, #0
   EXPORT_PC
   bl      nterp_get_static_field
   tst     r0, #1
   beq     .L${opcode}_resume          // non-volatile: take the fast path
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   lsr     r2, rINST, #8               // r2 <- A
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   add     ip, r0, r1                  // ip<- field address
   ATOMIC_LOAD64 ip, r0, r1, r3, .L${opcode}_slow_path_atomic_load
   dmb     ish                         // barrier after the volatile load
   CLEAR_SHADOW_PAIR r2, ip, lr
   VREG_INDEX_TO_ADDR r2, r2
   SET_VREG_WIDE_BY_ADDR r0, r1, r2    // fp[A] <- value
   .else
   $load   r0, [r0, r1]
   dmb     ish                         // barrier after the volatile load
   .if $is_object
   TEST_IF_MARKING .L${opcode}_mark_after_load
   SET_VREG_OBJECT r0, r2              // fp[A] <- value
   .else
   SET_VREG r0, r2                     // fp[A] <- value
   .endif
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // Mark the declaring class (in r0) before reading from it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sget_wide():
   // sget-wide: 64-bit load into a register pair.
%  op_sget(load="ldr", wide="1", is_object="0")

%def op_sget_object():
   // sget-object: reference load with read-barrier handling.
%  op_sget(load="ldr", wide="0", is_object="1")

%def op_sput_boolean():
   // sput-boolean: 8-bit store (stores need no sign/zero extension).
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_byte():
   // sput-byte: 8-bit store.
%  op_sput(store="strb", wide="0", is_object="0")

%def op_sput_char():
   // sput-char: 16-bit store.
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput_short():
   // sput-short: 16-bit store.
%  op_sput(store="strh", wide="0", is_object="0")

%def op_sput(store="str", wide="0", is_object="0"):
%  slow_path = add_slow_path(op_sput_slow_path, store, wide, is_object)
   // sput vAA, field@BBBB: stores vAA into a static field.
   .if !$wide
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   // Fast-path which gets the field from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label=slow_path)
.L${opcode}_resume:
   // Here r0 holds the ArtField pointer and (if !wide) r4 holds the value.
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_read_barrier
.L${opcode}_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2/r3 <- fp[A]
   add     r0, r0, r1                  // r0<- field address
   strd    r2, r3, [r0]
   .else
   $store  r4, [r0, r1]
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_read_barrier:
   // Mark the declaring class (in r0) before storing into it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_resume_after_read_barrier

%def op_sput_slow_path(store, wide, is_object):
   // Slow path: resolve the static field through the runtime. Bit 0 of the
   // returned pointer appears to mark a volatile field (see the tst below
   // and CLEAR_STATIC_VOLATILE_MARKER); clear results retry the fast path.
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   .if $is_object
   mov     r3, r4                      // pass the reference value; reloaded below
   .else
   mov     r3, #0
   .endif
   EXPORT_PC
   bl      nterp_get_static_field
   .if $is_object
   // Reload the value as it may have moved.
   lsr     r4, rINST, #8               // r4 <- A
   GET_VREG r4, r4                     // r4 <- v[A]
   .endif
   tst     r0, #1
   beq     .L${opcode}_resume          // non-volatile: take the fast path
   CLEAR_STATIC_VOLATILE_MARKER r0
   ldr     r1, [r0, #ART_FIELD_OFFSET_OFFSET]
   ldr     r0, [r0, #ART_FIELD_DECLARING_CLASS_OFFSET]
   TEST_IF_MARKING .L${opcode}_slow_path_read_barrier
.L${opcode}_slow_path_resume_after_read_barrier:
   .if $wide
   lsr     r2, rINST, #8               // r2 <- A
   VREG_INDEX_TO_ADDR r2, r2
   GET_VREG_WIDE_BY_ADDR r2, r3, r2    // r2/r3 <- fp[A]
   add     ip, r0, r1                  // ip<- field address
   dmb     ish                         // barrier before the volatile store
   ATOMIC_STORE64 ip, r2, r3, r0, r1, .L${opcode}_slow_path_atomic_store
   dmb     ish                         // barrier after the volatile store
   .else
   dmb     ish                         // barrier before the volatile store
   $store  r4, [r0, r1]
   dmb     ish                         // barrier after the volatile store
   WRITE_BARRIER_IF_OBJECT $is_object, r4, r0, .L${opcode}_slow_path_skip_write_barrier, r1
   .endif
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
.L${opcode}_slow_path_read_barrier:
   // Mark the declaring class (in r0) before storing into it.
   bl      art_quick_read_barrier_mark_reg00
   b       .L${opcode}_slow_path_resume_after_read_barrier

%def op_sput_wide():
   // sput-wide: 64-bit store from a register pair.
%  op_sput(store="str", wide="1", is_object="0")

%def op_sput_object():
   // sput-object: reference store with GC write barrier.
%  op_sput(store="str", wide="0", is_object="1")

%def op_new_instance():
   // new-instance vAA, type@BBBB: allocates an object of the given class.
   // The routine is too big to fit in a handler, so jump to it.
   EXPORT_PC
   // Fast-path which gets the class from thread-local cache.
%  fetch_from_thread_cache("r0", miss_label="2f")
   TEST_IF_MARKING 3f
4:
   // Call the thread's allocation entrypoint with the class in r0;
   // the object is expected back in r0 (consumed by SET_VREG_OBJECT below).
   ldr     lr, [rSELF, #THREAD_ALLOC_OBJECT_ENTRYPOINT_OFFSET]
   blx     lr
   dmb     ishst                        // need fence for making object's class visible
1:
   lsr     r1, rINST, #8                // r1 <- A
   SET_VREG_OBJECT r0, r1               // fp[A] <- value
   FETCH_ADVANCE_INST 2
   GET_INST_OPCODE ip
   GOTO_OPCODE ip
2:
   // Cache miss: resolve the class and allocate through the runtime.
   mov     r0, rSELF
   ldr     r1, [sp]
   mov     r2, rPC
   bl      nterp_allocate_object
   b       1b
3:
   // Marking: mark the class (in r0) before calling the entrypoint.
   bl      art_quick_read_barrier_mark_reg00
   b       4b
