1/* 2 * Copyright (C) 2014 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17#include "asm_support_mips64.S" 18 19#include "arch/quick_alloc_entrypoints.S" 20 21 .set noreorder 22 .balign 16 23 24 /* Deliver the given exception */ 25 .extern artDeliverExceptionFromCode 26 /* Deliver an exception pending on a thread */ 27 .extern artDeliverPendingExceptionFromCode 28 29 /* 30 * Macro that sets up $gp and stores the previous $gp value to $t8. 31 * This macro modifies v1 and t8. 32 */ 33.macro SETUP_GP 34 move $v1, $ra 35 bal 1f 36 nop 371: 38 .cpsetup $ra, $t8, 1b 39 move $ra, $v1 40.endm 41 42 /* 43 * Macro that sets up the callee save frame to conform with 44 * Runtime::CreateCalleeSaveMethod(kSaveAllCalleeSaves) 45 * callee-save: padding + $f24-$f31 + $s0-$s7 + $gp + $ra + $s8 = 19 total + 1x8 bytes padding 46 */ 47.macro SETUP_SAVE_ALL_CALLEE_SAVES_FRAME 48 daddiu $sp, $sp, -160 49 .cfi_adjust_cfa_offset 160 50 51 // Ugly compile-time check, but we only have the preprocessor. 52#if (FRAME_SIZE_SAVE_ALL_CALLEE_SAVES != 160) 53#error "FRAME_SIZE_SAVE_ALL_CALLEE_SAVES(MIPS64) size not as expected." 54#endif 55 56 sd $ra, 152($sp) 57 .cfi_rel_offset 31, 152 58 sd $s8, 144($sp) 59 .cfi_rel_offset 30, 144 60 sd $t8, 136($sp) # t8 holds caller's gp, now save it to the stack. 61 .cfi_rel_offset 28, 136 # Value from gp is pushed, so set the cfi offset accordingly. 
62 sd $s7, 128($sp) 63 .cfi_rel_offset 23, 128 64 sd $s6, 120($sp) 65 .cfi_rel_offset 22, 120 66 sd $s5, 112($sp) 67 .cfi_rel_offset 21, 112 68 sd $s4, 104($sp) 69 .cfi_rel_offset 20, 104 70 sd $s3, 96($sp) 71 .cfi_rel_offset 19, 96 72 sd $s2, 88($sp) 73 .cfi_rel_offset 18, 88 74 sd $s1, 80($sp) 75 .cfi_rel_offset 17, 80 76 sd $s0, 72($sp) 77 .cfi_rel_offset 16, 72 78 79 // FP callee-saves 80 s.d $f31, 64($sp) 81 s.d $f30, 56($sp) 82 s.d $f29, 48($sp) 83 s.d $f28, 40($sp) 84 s.d $f27, 32($sp) 85 s.d $f26, 24($sp) 86 s.d $f25, 16($sp) 87 s.d $f24, 8($sp) 88 89 # load appropriate callee-save-method 90 ld $t1, %got(_ZN3art7Runtime9instance_E)($gp) 91 ld $t1, 0($t1) 92 ld $t1, RUNTIME_SAVE_ALL_CALLEE_SAVES_METHOD_OFFSET($t1) 93 sd $t1, 0($sp) # Place ArtMethod* at bottom of stack. 94 sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. 95.endm 96 97 /* 98 * Macro that sets up the callee save frame to conform with 99 * Runtime::CreateCalleeSaveMethod(kSaveRefsOnly). Restoration assumes 100 * non-moving GC. 101 * Does not include rSUSPEND or rSELF 102 * callee-save: padding + $s2-$s7 + $gp + $ra + $s8 = 9 total + 1x8 bytes padding 103 */ 104.macro SETUP_SAVE_REFS_ONLY_FRAME 105 daddiu $sp, $sp, -80 106 .cfi_adjust_cfa_offset 80 107 108 // Ugly compile-time check, but we only have the preprocessor. 109#if (FRAME_SIZE_SAVE_REFS_ONLY != 80) 110#error "FRAME_SIZE_SAVE_REFS_ONLY(MIPS64) size not as expected." 111#endif 112 113 sd $ra, 72($sp) 114 .cfi_rel_offset 31, 72 115 sd $s8, 64($sp) 116 .cfi_rel_offset 30, 64 117 sd $t8, 56($sp) # t8 holds caller's gp, now save it to the stack. 118 .cfi_rel_offset 28, 56 # Value from gp is pushed, so set the cfi offset accordingly. 
119 sd $s7, 48($sp) 120 .cfi_rel_offset 23, 48 121 sd $s6, 40($sp) 122 .cfi_rel_offset 22, 40 123 sd $s5, 32($sp) 124 .cfi_rel_offset 21, 32 125 sd $s4, 24($sp) 126 .cfi_rel_offset 20, 24 127 sd $s3, 16($sp) 128 .cfi_rel_offset 19, 16 129 sd $s2, 8($sp) 130 .cfi_rel_offset 18, 8 131 # load appropriate callee-save-method 132 ld $t1, %got(_ZN3art7Runtime9instance_E)($gp) 133 ld $t1, 0($t1) 134 ld $t1, RUNTIME_SAVE_REFS_ONLY_METHOD_OFFSET($t1) 135 sd $t1, 0($sp) # Place Method* at bottom of stack. 136 sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. 137.endm 138 139.macro RESTORE_SAVE_REFS_ONLY_FRAME 140 ld $ra, 72($sp) 141 .cfi_restore 31 142 ld $s8, 64($sp) 143 .cfi_restore 30 144 ld $t8, 56($sp) # Restore gp back to it's temp storage. 145 .cfi_restore 28 146 ld $s7, 48($sp) 147 .cfi_restore 23 148 ld $s6, 40($sp) 149 .cfi_restore 22 150 ld $s5, 32($sp) 151 .cfi_restore 21 152 ld $s4, 24($sp) 153 .cfi_restore 20 154 ld $s3, 16($sp) 155 .cfi_restore 19 156 ld $s2, 8($sp) 157 .cfi_restore 18 158 daddiu $sp, $sp, 80 159 .cfi_adjust_cfa_offset -80 160 .cpreturn 161.endm 162 163.macro RESTORE_SAVE_REFS_ONLY_FRAME_AND_RETURN 164 ld $ra, 72($sp) 165 .cfi_restore 31 166 ld $s8, 64($sp) 167 .cfi_restore 30 168 ld $t8, 56($sp) # Restore gp back to it's temp storage. 169 .cfi_restore 28 170 ld $s7, 48($sp) 171 .cfi_restore 23 172 ld $s6, 40($sp) 173 .cfi_restore 22 174 ld $s5, 32($sp) 175 .cfi_restore 21 176 ld $s4, 24($sp) 177 .cfi_restore 20 178 ld $s3, 16($sp) 179 .cfi_restore 19 180 ld $s2, 8($sp) 181 .cfi_restore 18 182 .cpreturn 183 jalr $zero, $ra 184 daddiu $sp, $sp, 80 185 .cfi_adjust_cfa_offset -80 186.endm 187 188// This assumes the top part of these stack frame types are identical. 189#define REFS_AND_ARGS_MINUS_REFS_SIZE (FRAME_SIZE_SAVE_REFS_AND_ARGS - FRAME_SIZE_SAVE_REFS_ONLY) 190 191 /* 192 * Individually usable part of macro SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL. 
193 */ 194.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8 195 sd $s8, 192($sp) 196 .cfi_rel_offset 30, 192 197 sd $s7, 176($sp) 198 .cfi_rel_offset 23, 176 199 sd $s6, 168($sp) 200 .cfi_rel_offset 22, 168 201 sd $s5, 160($sp) 202 .cfi_rel_offset 21, 160 203 sd $s4, 152($sp) 204 .cfi_rel_offset 20, 152 205.endm 206 207.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL save_s4_thru_s8=1 208 daddiu $sp, $sp, -208 209 .cfi_adjust_cfa_offset 208 210 211 // Ugly compile-time check, but we only have the preprocessor. 212#if (FRAME_SIZE_SAVE_REFS_AND_ARGS != 208) 213#error "FRAME_SIZE_SAVE_REFS_AND_ARGS(MIPS64) size not as expected." 214#endif 215 216 sd $ra, 200($sp) # = kQuickCalleeSaveFrame_RefAndArgs_LrOffset 217 .cfi_rel_offset 31, 200 218 sd $t8, 184($sp) # t8 holds caller's gp, now save it to the stack. 219 .cfi_rel_offset 28, 184 # Value from gp is pushed, so set the cfi offset accordingly. 220 .if \save_s4_thru_s8 221 SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8 222 .endif 223 sd $s3, 144($sp) 224 .cfi_rel_offset 19, 144 225 sd $s2, 136($sp) 226 .cfi_rel_offset 18, 136 227 sd $a7, 128($sp) 228 .cfi_rel_offset 11, 128 229 sd $a6, 120($sp) 230 .cfi_rel_offset 10, 120 231 sd $a5, 112($sp) 232 .cfi_rel_offset 9, 112 233 sd $a4, 104($sp) 234 .cfi_rel_offset 8, 104 235 sd $a3, 96($sp) 236 .cfi_rel_offset 7, 96 237 sd $a2, 88($sp) 238 .cfi_rel_offset 6, 88 239 sd $a1, 80($sp) # = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset 240 .cfi_rel_offset 5, 80 241 242 s.d $f19, 72($sp) 243 s.d $f18, 64($sp) 244 s.d $f17, 56($sp) 245 s.d $f16, 48($sp) 246 s.d $f15, 40($sp) 247 s.d $f14, 32($sp) 248 s.d $f13, 24($sp) # = kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset 249 s.d $f12, 16($sp) # This isn't necessary to store. 250 # 1x8 bytes padding + Method* 251.endm 252 253 /* 254 * Macro that sets up the callee save frame to conform with 255 * Runtime::CreateCalleeSaveMethod(kSaveRefsAndArgs). Restoration assumes 256 * non-moving GC. 
257 * callee-save: padding + $f12-$f19 + $a1-$a7 + $s2-$s7 + $gp + $ra + $s8 = 24 total + 1 words padding + Method* 258 */ 259.macro SETUP_SAVE_REFS_AND_ARGS_FRAME save_s4_thru_s8_only=0 260 .if \save_s4_thru_s8_only 261 // It is expected that `SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL /* save_s4_thru_s8 */ 0` 262 // has been done prior to `SETUP_SAVE_REFS_AND_ARGS_FRAME /* save_s4_thru_s8_only */ 1`. 263 SETUP_SAVE_REFS_AND_ARGS_FRAME_S4_THRU_S8 264 .else 265 SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL 266 .endif 267 # load appropriate callee-save-method 268 ld $t1, %got(_ZN3art7Runtime9instance_E)($gp) 269 ld $t1, 0($t1) 270 ld $t1, RUNTIME_SAVE_REFS_AND_ARGS_METHOD_OFFSET($t1) 271 sd $t1, 0($sp) # Place Method* at bottom of stack. 272 sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. 273.endm 274 275.macro SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0 276 SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL 277 sd $a0, 0($sp) # Place Method* at bottom of stack. 278 sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) # Place sp in Thread::Current()->top_quick_frame. 279.endm 280 281 /* 282 * Individually usable part of macro RESTORE_SAVE_REFS_AND_ARGS_FRAME. 283 */ 284.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1 285 ld $a1, 80($sp) 286 .cfi_restore 5 287.endm 288 289.macro RESTORE_SAVE_REFS_AND_ARGS_FRAME restore_s4_thru_s8=1 290 ld $ra, 200($sp) 291 .cfi_restore 31 292 .if \restore_s4_thru_s8 293 ld $s8, 192($sp) 294 .cfi_restore 30 295 .endif 296 ld $t8, 184($sp) # Restore gp back to it's temp storage. 
297 .cfi_restore 28 298 .if \restore_s4_thru_s8 299 ld $s7, 176($sp) 300 .cfi_restore 23 301 ld $s6, 168($sp) 302 .cfi_restore 22 303 ld $s5, 160($sp) 304 .cfi_restore 21 305 ld $s4, 152($sp) 306 .cfi_restore 20 307 .endif 308 ld $s3, 144($sp) 309 .cfi_restore 19 310 ld $s2, 136($sp) 311 .cfi_restore 18 312 ld $a7, 128($sp) 313 .cfi_restore 11 314 ld $a6, 120($sp) 315 .cfi_restore 10 316 ld $a5, 112($sp) 317 .cfi_restore 9 318 ld $a4, 104($sp) 319 .cfi_restore 8 320 ld $a3, 96($sp) 321 .cfi_restore 7 322 ld $a2, 88($sp) 323 .cfi_restore 6 324 RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1 325 326 l.d $f19, 72($sp) 327 l.d $f18, 64($sp) 328 l.d $f17, 56($sp) 329 l.d $f16, 48($sp) 330 l.d $f15, 40($sp) 331 l.d $f14, 32($sp) 332 l.d $f13, 24($sp) 333 l.d $f12, 16($sp) 334 335 .cpreturn 336 daddiu $sp, $sp, 208 337 .cfi_adjust_cfa_offset -208 338.endm 339 340 /* 341 * Macro that sets up the callee save frame to conform with 342 * Runtime::CreateCalleeSaveMethod(kSaveEverything). 343 * when the $sp has already been decremented by FRAME_SIZE_SAVE_EVERYTHING. 344 * callee-save: $at + $v0-$v1 + $a0-$a7 + $t0-$t3 + $s0-$s7 + $t8-$t9 + $gp + $s8 + $ra + $s8, 345 * $f0-$f31; 28(GPR)+ 32(FPR) + 1x8 bytes padding + method* 346 * This macro sets up $gp; entrypoints using it should start with ENTRY_NO_GP. 347 */ 348.macro SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET 349 // Ugly compile-time check, but we only have the preprocessor. 350#if (FRAME_SIZE_SAVE_EVERYTHING != 496) 351#error "FRAME_SIZE_SAVE_EVERYTHING(MIPS64) size not as expected." 352#endif 353 354 // Save core registers. 
355 sd $ra, 488($sp) 356 .cfi_rel_offset 31, 488 357 sd $s8, 480($sp) 358 .cfi_rel_offset 30, 480 359 sd $t9, 464($sp) 360 .cfi_rel_offset 25, 464 361 sd $t8, 456($sp) 362 .cfi_rel_offset 24, 456 363 sd $s7, 448($sp) 364 .cfi_rel_offset 23, 448 365 sd $s6, 440($sp) 366 .cfi_rel_offset 22, 440 367 sd $s5, 432($sp) 368 .cfi_rel_offset 21, 432 369 sd $s4, 424($sp) 370 .cfi_rel_offset 20, 424 371 sd $s3, 416($sp) 372 .cfi_rel_offset 19, 416 373 sd $s2, 408($sp) 374 .cfi_rel_offset 18, 408 375 sd $s1, 400($sp) 376 .cfi_rel_offset 17, 400 377 sd $s0, 392($sp) 378 .cfi_rel_offset 16, 392 379 sd $t3, 384($sp) 380 .cfi_rel_offset 15, 384 381 sd $t2, 376($sp) 382 .cfi_rel_offset 14, 376 383 sd $t1, 368($sp) 384 .cfi_rel_offset 13, 368 385 sd $t0, 360($sp) 386 .cfi_rel_offset 12, 360 387 sd $a7, 352($sp) 388 .cfi_rel_offset 11, 352 389 sd $a6, 344($sp) 390 .cfi_rel_offset 10, 344 391 sd $a5, 336($sp) 392 .cfi_rel_offset 9, 336 393 sd $a4, 328($sp) 394 .cfi_rel_offset 8, 328 395 sd $a3, 320($sp) 396 .cfi_rel_offset 7, 320 397 sd $a2, 312($sp) 398 .cfi_rel_offset 6, 312 399 sd $a1, 304($sp) 400 .cfi_rel_offset 5, 304 401 sd $a0, 296($sp) 402 .cfi_rel_offset 4, 296 403 sd $v1, 288($sp) 404 .cfi_rel_offset 3, 288 405 sd $v0, 280($sp) 406 .cfi_rel_offset 2, 280 407 408 // Set up $gp, clobbering $ra and using the branch delay slot for a useful instruction. 409 bal 1f 410 .set push 411 .set noat 412 sd $at, 272($sp) 413 .cfi_rel_offset 1, 272 414 .set pop 4151: 416 .cpsetup $ra, 472, 1b 417 418 // Save FP registers. 
419 s.d $f31, 264($sp) 420 s.d $f30, 256($sp) 421 s.d $f29, 248($sp) 422 s.d $f28, 240($sp) 423 s.d $f27, 232($sp) 424 s.d $f26, 224($sp) 425 s.d $f25, 216($sp) 426 s.d $f24, 208($sp) 427 s.d $f23, 200($sp) 428 s.d $f22, 192($sp) 429 s.d $f21, 184($sp) 430 s.d $f20, 176($sp) 431 s.d $f19, 168($sp) 432 s.d $f18, 160($sp) 433 s.d $f17, 152($sp) 434 s.d $f16, 144($sp) 435 s.d $f15, 136($sp) 436 s.d $f14, 128($sp) 437 s.d $f13, 120($sp) 438 s.d $f12, 112($sp) 439 s.d $f11, 104($sp) 440 s.d $f10, 96($sp) 441 s.d $f9, 88($sp) 442 s.d $f8, 80($sp) 443 s.d $f7, 72($sp) 444 s.d $f6, 64($sp) 445 s.d $f5, 56($sp) 446 s.d $f4, 48($sp) 447 s.d $f3, 40($sp) 448 s.d $f2, 32($sp) 449 s.d $f1, 24($sp) 450 s.d $f0, 16($sp) 451 452 # load appropriate callee-save-method 453 ld $t1, %got(_ZN3art7Runtime9instance_E)($gp) 454 ld $t1, 0($t1) 455 ld $t1, \runtime_method_offset($t1) 456 sd $t1, 0($sp) # Place ArtMethod* at bottom of stack. 457 # Place sp in Thread::Current()->top_quick_frame. 458 sd $sp, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF) 459.endm 460 461 /* 462 * Macro that sets up the callee save frame to conform with 463 * Runtime::CreateCalleeSaveMethod(kSaveEverything). 464 * callee-save: $at + $v0-$v1 + $a0-$a7 + $t0-$t3 + $s0-$s7 + $t8-$t9 + $gp + $s8 + $ra + $s8, 465 * $f0-$f31; 28(GPR)+ 32(FPR) + 1x8 bytes padding + method* 466 * This macro sets up $gp; entrypoints using it should start with ENTRY_NO_GP. 467 */ 468.macro SETUP_SAVE_EVERYTHING_FRAME runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET 469 daddiu $sp, $sp, -(FRAME_SIZE_SAVE_EVERYTHING) 470 .cfi_adjust_cfa_offset (FRAME_SIZE_SAVE_EVERYTHING) 471 SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP \runtime_method_offset 472.endm 473 474.macro RESTORE_SAVE_EVERYTHING_FRAME restore_a0=1 475 // Restore FP registers. 
476 l.d $f31, 264($sp) 477 l.d $f30, 256($sp) 478 l.d $f29, 248($sp) 479 l.d $f28, 240($sp) 480 l.d $f27, 232($sp) 481 l.d $f26, 224($sp) 482 l.d $f25, 216($sp) 483 l.d $f24, 208($sp) 484 l.d $f23, 200($sp) 485 l.d $f22, 192($sp) 486 l.d $f21, 184($sp) 487 l.d $f20, 176($sp) 488 l.d $f19, 168($sp) 489 l.d $f18, 160($sp) 490 l.d $f17, 152($sp) 491 l.d $f16, 144($sp) 492 l.d $f15, 136($sp) 493 l.d $f14, 128($sp) 494 l.d $f13, 120($sp) 495 l.d $f12, 112($sp) 496 l.d $f11, 104($sp) 497 l.d $f10, 96($sp) 498 l.d $f9, 88($sp) 499 l.d $f8, 80($sp) 500 l.d $f7, 72($sp) 501 l.d $f6, 64($sp) 502 l.d $f5, 56($sp) 503 l.d $f4, 48($sp) 504 l.d $f3, 40($sp) 505 l.d $f2, 32($sp) 506 l.d $f1, 24($sp) 507 l.d $f0, 16($sp) 508 509 // Restore core registers. 510 .cpreturn 511 ld $ra, 488($sp) 512 .cfi_restore 31 513 ld $s8, 480($sp) 514 .cfi_restore 30 515 ld $t9, 464($sp) 516 .cfi_restore 25 517 ld $t8, 456($sp) 518 .cfi_restore 24 519 ld $s7, 448($sp) 520 .cfi_restore 23 521 ld $s6, 440($sp) 522 .cfi_restore 22 523 ld $s5, 432($sp) 524 .cfi_restore 21 525 ld $s4, 424($sp) 526 .cfi_restore 20 527 ld $s3, 416($sp) 528 .cfi_restore 19 529 ld $s2, 408($sp) 530 .cfi_restore 18 531 ld $s1, 400($sp) 532 .cfi_restore 17 533 ld $s0, 392($sp) 534 .cfi_restore 16 535 ld $t3, 384($sp) 536 .cfi_restore 15 537 ld $t2, 376($sp) 538 .cfi_restore 14 539 ld $t1, 368($sp) 540 .cfi_restore 13 541 ld $t0, 360($sp) 542 .cfi_restore 12 543 ld $a7, 352($sp) 544 .cfi_restore 11 545 ld $a6, 344($sp) 546 .cfi_restore 10 547 ld $a5, 336($sp) 548 .cfi_restore 9 549 ld $a4, 328($sp) 550 .cfi_restore 8 551 ld $a3, 320($sp) 552 .cfi_restore 7 553 ld $a2, 312($sp) 554 .cfi_restore 6 555 ld $a1, 304($sp) 556 .cfi_restore 5 557 .if \restore_a0 558 ld $a0, 296($sp) 559 .cfi_restore 4 560 .endif 561 ld $v1, 288($sp) 562 .cfi_restore 3 563 ld $v0, 280($sp) 564 .cfi_restore 2 565 .set push 566 .set noat 567 ld $at, 272($sp) 568 .cfi_restore 1 569 .set pop 570 571 daddiu $sp, $sp, 496 572 .cfi_adjust_cfa_offset -496 
573.endm 574 575 /* 576 * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending 577 * exception is Thread::Current()->exception_ when the runtime method frame is ready. 578 * Requires $gp properly set up. 579 */ 580.macro DELIVER_PENDING_EXCEPTION_FRAME_READY 581 dla $t9, artDeliverPendingExceptionFromCode 582 jalr $zero, $t9 # artDeliverPendingExceptionFromCode(Thread*) 583 move $a0, rSELF # pass Thread::Current 584.endm 585 586 /* 587 * Macro that calls through to artDeliverPendingExceptionFromCode, where the pending 588 * exception is Thread::Current()->exception_. 589 */ 590.macro DELIVER_PENDING_EXCEPTION 591 SETUP_GP 592 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME # save callee saves for throw 593 DELIVER_PENDING_EXCEPTION_FRAME_READY 594.endm 595 596.macro RETURN_IF_NO_EXCEPTION 597 ld $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_ 598 RESTORE_SAVE_REFS_ONLY_FRAME 599 bne $t0, $zero, 1f # success if no exception is pending 600 nop 601 jalr $zero, $ra 602 nop 6031: 604 DELIVER_PENDING_EXCEPTION 605.endm 606 607.macro RETURN_IF_ZERO 608 RESTORE_SAVE_REFS_ONLY_FRAME 609 bne $v0, $zero, 1f # success? 610 nop 611 jalr $zero, $ra # return on success 612 nop 6131: 614 DELIVER_PENDING_EXCEPTION 615.endm 616 617.macro RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER 618 RESTORE_SAVE_REFS_ONLY_FRAME 619 beq $v0, $zero, 1f # success? 620 nop 621 jalr $zero, $ra # return on success 622 nop 6231: 624 DELIVER_PENDING_EXCEPTION 625.endm 626 627 /* 628 * On stack replacement stub. 
629 * On entry: 630 * a0 = stack to copy 631 * a1 = size of stack 632 * a2 = pc to call 633 * a3 = JValue* result 634 * a4 = shorty 635 * a5 = thread 636 */ 637ENTRY art_quick_osr_stub 638 move $t0, $sp # save stack pointer 639 daddiu $t1, $sp, -112 # reserve stack space 640 dsrl $t1, $t1, 4 # enforce 16 byte stack alignment 641 dsll $sp, $t1, 4 # update stack pointer 642 643 // Save callee general purpose registers, SP, T8(GP), RA, A3, and A4 (8x14 bytes) 644 sd $ra, 104($sp) 645 .cfi_rel_offset 31, 104 646 sd $s8, 96($sp) 647 .cfi_rel_offset 30, 96 648 sd $t0, 88($sp) # save original stack pointer stored in t0 649 .cfi_rel_offset 29, 88 650 sd $t8, 80($sp) # t8 holds caller's gp, now save it to the stack. 651 .cfi_rel_offset 28, 80 # Value from gp is pushed, so set the cfi offset accordingly. 652 sd $s7, 72($sp) 653 .cfi_rel_offset 23, 72 654 sd $s6, 64($sp) 655 .cfi_rel_offset 22, 64 656 sd $s5, 56($sp) 657 .cfi_rel_offset 21, 56 658 sd $s4, 48($sp) 659 .cfi_rel_offset 20, 48 660 sd $s3, 40($sp) 661 .cfi_rel_offset 19, 40 662 sd $s2, 32($sp) 663 .cfi_rel_offset 18, 32 664 sd $s1, 24($sp) 665 .cfi_rel_offset 17, 24 666 sd $s0, 16($sp) 667 .cfi_rel_offset 16, 16 668 sd $a4, 8($sp) 669 .cfi_rel_offset 8, 8 670 sd $a3, 0($sp) 671 .cfi_rel_offset 7, 0 672 move rSELF, $a5 # Save managed thread pointer into rSELF 673 674 daddiu $sp, $sp, -16 675 jal .Losr_entry 676 sd $zero, 0($sp) # Store null for ArtMethod* at bottom of frame 677 daddiu $sp, $sp, 16 678 679 // Restore return value address and shorty address 680 ld $a4, 8($sp) # shorty address 681 .cfi_restore 8 682 ld $a3, 0($sp) # result value address 683 .cfi_restore 7 684 685 lbu $t1, 0($a4) # load return type 686 li $t2, 'D' # put char 'D' into t2 687 beq $t1, $t2, .Losr_fp_result # branch if result type char == 'D' 688 li $t2, 'F' # put char 'F' into t2 689 beq $t1, $t2, .Losr_fp_result # branch if result type char == 'F' 690 nop 691 b .Losr_exit 692 dsrl $v1, $v0, 32 # put high half of result in v1 
693.Losr_fp_result: 694 mfc1 $v0, $f0 695 mfhc1 $v1, $f0 # put high half of FP result in v1 696.Losr_exit: 697 sw $v0, 0($a3) # store low half of result 698 sw $v1, 4($a3) # store high half of result 699 700 // Restore callee registers 701 ld $ra, 104($sp) 702 .cfi_restore 31 703 ld $s8, 96($sp) 704 .cfi_restore 30 705 ld $t0, 88($sp) # save SP into t0 for now 706 .cfi_restore 29 707 ld $t8, 80($sp) # Restore gp back to it's temp storage. 708 .cfi_restore 28 709 ld $s7, 72($sp) 710 .cfi_restore 23 711 ld $s6, 64($sp) 712 .cfi_restore 22 713 ld $s5, 56($sp) 714 .cfi_restore 21 715 ld $s4, 48($sp) 716 .cfi_restore 20 717 ld $s3, 40($sp) 718 .cfi_restore 19 719 ld $s2, 32($sp) 720 .cfi_restore 18 721 ld $s1, 24($sp) 722 .cfi_restore 17 723 ld $s0, 16($sp) 724 .cfi_restore 16 725 jalr $zero, $ra 726 move $sp, $t0 727 728.Losr_entry: 729 dsubu $sp, $sp, $a1 # Reserve space for callee stack 730 daddiu $a1, $a1, -8 731 daddu $t0, $a1, $sp 732 sw $ra, 0($t0) # Store low half of RA per compiler ABI 733 dsrl $t1, $ra, 32 734 sw $t1, 4($t0) # Store high half of RA per compiler ABI 735 736 // Copy arguments into callee stack 737 // Use simple copy routine for now. 738 // 4 bytes per slot. 739 // a0 = source address 740 // a1 = args length in bytes (does not include 8 bytes for RA) 741 // sp = destination address 742 beqz $a1, .Losr_loop_exit 743 daddiu $a1, $a1, -4 744 daddu $t1, $a0, $a1 745 daddu $t2, $sp, $a1 746.Losr_loop_entry: 747 lw $t0, 0($t1) 748 daddiu $t1, $t1, -4 749 sw $t0, 0($t2) 750 bne $sp, $t2, .Losr_loop_entry 751 daddiu $t2, $t2, -4 752 753.Losr_loop_exit: 754 move $t9, $a2 755 jalr $zero, $t9 # Jump to the OSR entry point. 756 nop 757END art_quick_osr_stub 758 759 /* 760 * On entry $a0 is uint32_t* gprs_ and $a1 is uint32_t* fprs_ 761 * FIXME: just guessing about the shape of the jmpbuf. Where will pc be? 
762 */ 763ENTRY_NO_GP art_quick_do_long_jump 764 l.d $f0, 0($a1) 765 l.d $f1, 8($a1) 766 l.d $f2, 16($a1) 767 l.d $f3, 24($a1) 768 l.d $f4, 32($a1) 769 l.d $f5, 40($a1) 770 l.d $f6, 48($a1) 771 l.d $f7, 56($a1) 772 l.d $f8, 64($a1) 773 l.d $f9, 72($a1) 774 l.d $f10, 80($a1) 775 l.d $f11, 88($a1) 776 l.d $f12, 96($a1) 777 l.d $f13, 104($a1) 778 l.d $f14, 112($a1) 779 l.d $f15, 120($a1) 780 l.d $f16, 128($a1) 781 l.d $f17, 136($a1) 782 l.d $f18, 144($a1) 783 l.d $f19, 152($a1) 784 l.d $f20, 160($a1) 785 l.d $f21, 168($a1) 786 l.d $f22, 176($a1) 787 l.d $f23, 184($a1) 788 l.d $f24, 192($a1) 789 l.d $f25, 200($a1) 790 l.d $f26, 208($a1) 791 l.d $f27, 216($a1) 792 l.d $f28, 224($a1) 793 l.d $f29, 232($a1) 794 l.d $f30, 240($a1) 795 l.d $f31, 248($a1) 796 .set push 797 .set nomacro 798 .set noat 799# no need to load zero 800 ld $at, 8($a0) 801 .set pop 802 ld $v0, 16($a0) 803 ld $v1, 24($a0) 804# a0 has to be loaded last 805 ld $a1, 40($a0) 806 ld $a2, 48($a0) 807 ld $a3, 56($a0) 808 ld $a4, 64($a0) 809 ld $a5, 72($a0) 810 ld $a6, 80($a0) 811 ld $a7, 88($a0) 812 ld $t0, 96($a0) 813 ld $t1, 104($a0) 814 ld $t2, 112($a0) 815 ld $t3, 120($a0) 816 ld $s0, 128($a0) 817 ld $s1, 136($a0) 818 ld $s2, 144($a0) 819 ld $s3, 152($a0) 820 ld $s4, 160($a0) 821 ld $s5, 168($a0) 822 ld $s6, 176($a0) 823 ld $s7, 184($a0) 824 ld $t8, 192($a0) 825 ld $t9, 200($a0) 826# no need to load k0, k1 827 ld $gp, 224($a0) 828 ld $sp, 232($a0) 829 ld $s8, 240($a0) 830 ld $ra, 248($a0) 831 ld $a0, 32($a0) 832 move $v0, $zero # clear result registers v0 and v1 833 jalr $zero, $t9 # do long jump (do not use ra, it must not be clobbered) 834 move $v1, $zero 835END art_quick_do_long_jump 836 837 /* 838 * Called by managed code, saves most registers (forms basis of long jump 839 * context) and passes the bottom of the stack. 840 * artDeliverExceptionFromCode will place the callee save Method* at 841 * the bottom of the thread. 
On entry a0 holds Throwable* 842 */ 843ENTRY art_quick_deliver_exception 844 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME 845 dla $t9, artDeliverExceptionFromCode 846 jalr $zero, $t9 # artDeliverExceptionFromCode(Throwable*, Thread*) 847 move $a1, rSELF # pass Thread::Current 848END art_quick_deliver_exception 849 850 /* 851 * Called by managed code to create and deliver a NullPointerException 852 */ 853 .extern artThrowNullPointerExceptionFromCode 854ENTRY_NO_GP art_quick_throw_null_pointer_exception 855 // Note that setting up $gp does not rely on $t9 here, so branching here directly is OK, 856 // even after clobbering any registers we don't need to preserve, such as $gp or $t0. 857 SETUP_SAVE_EVERYTHING_FRAME 858 dla $t9, artThrowNullPointerExceptionFromCode 859 jalr $zero, $t9 # artThrowNullPointerExceptionFromCode(Thread*) 860 move $a0, rSELF # pass Thread::Current 861END art_quick_throw_null_pointer_exception 862 863 /* 864 * Call installed by a signal handler to create and deliver a NullPointerException 865 */ 866 .extern artThrowNullPointerExceptionFromSignal 867ENTRY_NO_GP_CUSTOM_CFA art_quick_throw_null_pointer_exception_from_signal, FRAME_SIZE_SAVE_EVERYTHING 868 SETUP_SAVE_EVERYTHING_FRAME_DECREMENTED_SP 869 # Retrieve the fault address from the padding where the signal handler stores it. 
870 ld $a0, (__SIZEOF_POINTER__)($sp) 871 dla $t9, artThrowNullPointerExceptionFromSignal 872 jalr $zero, $t9 # artThrowNullPointerExceptionFromSignal(uinptr_t, Thread*) 873 move $a1, rSELF # pass Thread::Current 874END art_quick_throw_null_pointer_exception_from_signal 875 876 /* 877 * Called by managed code to create and deliver an ArithmeticException 878 */ 879 .extern artThrowDivZeroFromCode 880ENTRY_NO_GP art_quick_throw_div_zero 881 SETUP_SAVE_EVERYTHING_FRAME 882 dla $t9, artThrowDivZeroFromCode 883 jalr $zero, $t9 # artThrowDivZeroFromCode(Thread*) 884 move $a0, rSELF # pass Thread::Current 885END art_quick_throw_div_zero 886 887 /* 888 * Called by managed code to create and deliver an 889 * ArrayIndexOutOfBoundsException 890 */ 891 .extern artThrowArrayBoundsFromCode 892ENTRY_NO_GP art_quick_throw_array_bounds 893 // Note that setting up $gp does not rely on $t9 here, so branching here directly is OK, 894 // even after clobbering any registers we don't need to preserve, such as $gp or $t0. 895 SETUP_SAVE_EVERYTHING_FRAME 896 dla $t9, artThrowArrayBoundsFromCode 897 jalr $zero, $t9 # artThrowArrayBoundsFromCode(index, limit, Thread*) 898 move $a2, rSELF # pass Thread::Current 899END art_quick_throw_array_bounds 900 901 /* 902 * Called by managed code to create and deliver a StringIndexOutOfBoundsException 903 * as if thrown from a call to String.charAt(). 904 */ 905 .extern artThrowStringBoundsFromCode 906ENTRY_NO_GP art_quick_throw_string_bounds 907 SETUP_SAVE_EVERYTHING_FRAME 908 dla $t9, artThrowStringBoundsFromCode 909 jalr $zero, $t9 # artThrowStringBoundsFromCode(index, limit, Thread*) 910 move $a2, rSELF # pass Thread::Current 911END art_quick_throw_string_bounds 912 913 /* 914 * Called by managed code to create and deliver a StackOverflowError. 
915 */ 916 .extern artThrowStackOverflowFromCode 917ENTRY art_quick_throw_stack_overflow 918 SETUP_SAVE_ALL_CALLEE_SAVES_FRAME 919 dla $t9, artThrowStackOverflowFromCode 920 jalr $zero, $t9 # artThrowStackOverflowFromCode(Thread*) 921 move $a0, rSELF # pass Thread::Current 922END art_quick_throw_stack_overflow 923 924 /* 925 * All generated callsites for interface invokes and invocation slow paths will load arguments 926 * as usual - except instead of loading arg0/$a0 with the target Method*, arg0/$a0 will contain 927 * the method_idx. This wrapper will save arg1-arg3, load the caller's Method*, align the 928 * stack and call the appropriate C helper. 929 * NOTE: "this" is first visable argument of the target, and so can be found in arg1/$a1. 930 * 931 * The helper will attempt to locate the target and return a 128-bit result in $v0/$v1 consisting 932 * of the target Method* in $v0 and method->code_ in $v1. 933 * 934 * If unsuccessful, the helper will return null/null. There will be a pending exception in the 935 * thread and we branch to another stub to deliver it. 936 * 937 * On success this wrapper will restore arguments and *jump* to the target, leaving the ra 938 * pointing back to the original caller. 
939 */ 940.macro INVOKE_TRAMPOLINE_BODY cxx_name, save_s4_thru_s8_only=0 941 .extern \cxx_name 942 SETUP_SAVE_REFS_AND_ARGS_FRAME \save_s4_thru_s8_only # save callee saves in case 943 # allocation triggers GC 944 move $a2, rSELF # pass Thread::Current 945 jal \cxx_name # (method_idx, this, Thread*, $sp) 946 move $a3, $sp # pass $sp 947 move $a0, $v0 # save target Method* 948 move $t9, $v1 # save $v0->code_ 949 RESTORE_SAVE_REFS_AND_ARGS_FRAME 950 beq $v0, $zero, 1f 951 nop 952 jalr $zero, $t9 953 nop 9541: 955 DELIVER_PENDING_EXCEPTION 956.endm 957.macro INVOKE_TRAMPOLINE c_name, cxx_name 958ENTRY \c_name 959 INVOKE_TRAMPOLINE_BODY \cxx_name 960END \c_name 961.endm 962 963INVOKE_TRAMPOLINE art_quick_invoke_interface_trampoline_with_access_check, artInvokeInterfaceTrampolineWithAccessCheck 964 965INVOKE_TRAMPOLINE art_quick_invoke_static_trampoline_with_access_check, artInvokeStaticTrampolineWithAccessCheck 966INVOKE_TRAMPOLINE art_quick_invoke_direct_trampoline_with_access_check, artInvokeDirectTrampolineWithAccessCheck 967INVOKE_TRAMPOLINE art_quick_invoke_super_trampoline_with_access_check, artInvokeSuperTrampolineWithAccessCheck 968INVOKE_TRAMPOLINE art_quick_invoke_virtual_trampoline_with_access_check, artInvokeVirtualTrampolineWithAccessCheck 969 970 # On entry: 971 # t0 = shorty 972 # t1 = ptr to arg_array 973 # t2 = number of argument bytes remain 974 # v0 = ptr to stack frame where to copy arg_array 975 # This macro modifies t3, t9 and v0 976.macro LOOP_OVER_SHORTY_LOADING_REG gpu, fpu, label 977 lbu $t3, 0($t0) # get argument type from shorty 978 beqz $t3, \label 979 daddiu $t0, 1 980 li $t9, 68 # put char 'D' into t9 981 beq $t9, $t3, 1f # branch if result type char == 'D' 982 li $t9, 70 # put char 'F' into t9 983 beq $t9, $t3, 2f # branch if result type char == 'F' 984 li $t9, 74 # put char 'J' into t9 985 beq $t9, $t3, 3f # branch if result type char == 'J' 986 nop 987 lw $\gpu, 0($t1) 988 sw $\gpu, 0($v0) 989 daddiu $v0, 4 990 daddiu $t1, 4 991 b 4f 
992 daddiu $t2, -4 # delay slot 993 9941: # found double 995 lwu $t3, 0($t1) 996 mtc1 $t3, $\fpu 997 sw $t3, 0($v0) 998 lwu $t3, 4($t1) 999 mthc1 $t3, $\fpu 1000 sw $t3, 4($v0) 1001 daddiu $v0, 8 1002 daddiu $t1, 8 1003 b 4f 1004 daddiu $t2, -8 # delay slot 1005 10062: # found float 1007 lwu $t3, 0($t1) 1008 mtc1 $t3, $\fpu 1009 sw $t3, 0($v0) 1010 daddiu $v0, 4 1011 daddiu $t1, 4 1012 b 4f 1013 daddiu $t2, -4 # delay slot 1014 10153: # found long (8 bytes) 1016 lwu $t3, 0($t1) 1017 sw $t3, 0($v0) 1018 lwu $t9, 4($t1) 1019 sw $t9, 4($v0) 1020 dsll $t9, $t9, 32 1021 or $\gpu, $t9, $t3 1022 daddiu $v0, 8 1023 daddiu $t1, 8 1024 daddiu $t2, -8 10254: 1026.endm 1027 1028 /* 1029 * Invocation stub for quick code. 1030 * On entry: 1031 * a0 = method pointer 1032 * a1 = argument array that must at least contain the this ptr. 1033 * a2 = size of argument array in bytes 1034 * a3 = (managed) thread pointer 1035 * a4 = JValue* result 1036 * a5 = shorty 1037 */ 1038ENTRY_NO_GP art_quick_invoke_stub 1039 # push a4, a5, s0(rSUSPEND), s1(rSELF), s8, ra onto the stack 1040 daddiu $sp, $sp, -48 1041 .cfi_adjust_cfa_offset 48 1042 sd $ra, 40($sp) 1043 .cfi_rel_offset 31, 40 1044 sd $s8, 32($sp) 1045 .cfi_rel_offset 30, 32 1046 sd $s1, 24($sp) 1047 .cfi_rel_offset 17, 24 1048 sd $s0, 16($sp) 1049 .cfi_rel_offset 16, 16 1050 sd $a5, 8($sp) 1051 .cfi_rel_offset 9, 8 1052 sd $a4, 0($sp) 1053 .cfi_rel_offset 8, 0 1054 1055 move $s1, $a3 # move managed thread pointer into s1 (rSELF) 1056 move $s8, $sp # save sp in s8 (fp) 1057 1058 daddiu $t3, $a2, 24 # add 8 for ArtMethod* and 16 for stack alignment 1059 dsrl $t3, $t3, 4 # shift the frame size right 4 1060 dsll $t3, $t3, 4 # shift the frame size left 4 to align to 16 bytes 1061 dsubu $sp, $sp, $t3 # reserve stack space for argument array 1062 1063 daddiu $t0, $a5, 1 # t0 = shorty[1] (skip 1 for return type) 1064 daddiu $t1, $a1, 4 # t1 = ptr to arg_array[4] (skip this ptr) 1065 daddiu $t2, $a2, -4 # t2 = number of argument bytes remain 
(skip this ptr) 1066 daddiu $v0, $sp, 12 # v0 points to where to copy arg_array 1067 LOOP_OVER_SHORTY_LOADING_REG a2, f14, call_fn 1068 LOOP_OVER_SHORTY_LOADING_REG a3, f15, call_fn 1069 LOOP_OVER_SHORTY_LOADING_REG a4, f16, call_fn 1070 LOOP_OVER_SHORTY_LOADING_REG a5, f17, call_fn 1071 LOOP_OVER_SHORTY_LOADING_REG a6, f18, call_fn 1072 LOOP_OVER_SHORTY_LOADING_REG a7, f19, call_fn 1073 1074 # copy arguments onto stack (t2 should be multiples of 4) 1075 ble $t2, $zero, call_fn # t2 = number of argument bytes remain 10761: 1077 lw $t3, 0($t1) # load from argument array 1078 daddiu $t1, $t1, 4 1079 sw $t3, 0($v0) # save to stack 1080 daddiu $t2, -4 1081 bgt $t2, $zero, 1b # t2 = number of argument bytes remain 1082 daddiu $v0, $v0, 4 1083 1084call_fn: 1085 # call method (a0 and a1 have been untouched) 1086 lwu $a1, 0($a1) # make a1 = this ptr 1087 sw $a1, 8($sp) # copy this ptr (skip 8 bytes for ArtMethod*) 1088 sd $zero, 0($sp) # store null for ArtMethod* at bottom of frame 1089 ld $t9, ART_METHOD_QUICK_CODE_OFFSET_64($a0) # get pointer to the code 1090 jalr $t9 # call the method 1091 nop 1092 move $sp, $s8 # restore sp 1093 1094 # pop a4, a5, s1(rSELF), s8, ra off of the stack 1095 ld $a4, 0($sp) 1096 .cfi_restore 8 1097 ld $a5, 8($sp) 1098 .cfi_restore 9 1099 ld $s0, 16($sp) 1100 .cfi_restore 16 1101 ld $s1, 24($sp) 1102 .cfi_restore 17 1103 ld $s8, 32($sp) 1104 .cfi_restore 30 1105 ld $ra, 40($sp) 1106 .cfi_restore 31 1107 daddiu $sp, $sp, 48 1108 .cfi_adjust_cfa_offset -48 1109 1110 # a4 = JValue* result 1111 # a5 = shorty string 1112 lbu $t1, 0($a5) # get result type from shorty 1113 li $t2, 68 # put char 'D' into t2 1114 beq $t1, $t2, 1f # branch if result type char == 'D' 1115 li $t3, 70 # put char 'F' into t3 1116 beq $t1, $t3, 1f # branch if result type char == 'F' 1117 sw $v0, 0($a4) # store the result 1118 dsrl $v1, $v0, 32 1119 jalr $zero, $ra 1120 sw $v1, 4($a4) # store the other half of the result 11211: 1122 mfc1 $v0, $f0 1123 mfhc1 $v1, $f0 1124 sw 
$v0, 0($a4)            # store the result
    jalr    $zero, $ra
    sw      $v1, 4($a4)                # store the other half of the result
END art_quick_invoke_stub

    /*
     * Invocation static stub for quick code.
     * On entry:
     *   a0 = method pointer
     *   a1 = argument array that must at least contain the this ptr.
     *   a2 = size of argument array in bytes
     *   a3 = (managed) thread pointer
     *   a4 = JValue* result
     *   a5 = shorty
     */
ENTRY_NO_GP art_quick_invoke_static_stub

    # Builds a temporary native frame, distributes the managed argument array into
    # the MIPS64 argument registers / outgoing stack area according to the shorty,
    # calls the method's quick code, then stores the JValue result.
    # Unlike art_quick_invoke_stub there is no implicit `this` argument, so the
    # argument array is copied starting at a1/f13.

    # push a4, a5, s0(rSUSPEND), s1(rSELF), s8, ra, onto the stack
    daddiu  $sp, $sp, -48
    .cfi_adjust_cfa_offset 48
    sd      $ra, 40($sp)
    .cfi_rel_offset 31, 40
    sd      $s8, 32($sp)
    .cfi_rel_offset 30, 32
    sd      $s1, 24($sp)
    .cfi_rel_offset 17, 24
    sd      $s0, 16($sp)
    .cfi_rel_offset 16, 16
    sd      $a5, 8($sp)
    .cfi_rel_offset 9, 8
    sd      $a4, 0($sp)
    .cfi_rel_offset 8, 0

    move    $s1, $a3                   # move managed thread pointer into s1 (rSELF)
    move    $s8, $sp                   # save sp in s8 (fp)

    daddiu  $t3, $a2, 24               # add 8 for ArtMethod* and 16 for stack alignment
    dsrl    $t3, $t3, 4                # shift the frame size right 4
    dsll    $t3, $t3, 4                # shift the frame size left 4 to align to 16 bytes
    dsubu   $sp, $sp, $t3              # reserve stack space for argument array

    daddiu  $t0, $a5, 1                # t0 = shorty[1] (skip 1 for return type)
    move    $t1, $a1                   # t1 = arg_array
    move    $t2, $a2                   # t2 = number of argument bytes remain
    daddiu  $v0, $sp, 8                # v0 points to where to copy arg_array
    # Fill integer/FP argument registers from the argument array, one per shorty
    # character; each invocation branches to call_sfn when the shorty is exhausted.
    LOOP_OVER_SHORTY_LOADING_REG a1, f13, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a2, f14, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a3, f15, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a4, f16, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a5, f17, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a6, f18, call_sfn
    LOOP_OVER_SHORTY_LOADING_REG a7, f19, call_sfn

    # copy arguments onto stack (t2 should be multiples of 4)
    ble     $t2, $zero, call_sfn       # t2 = number of argument bytes remain
1:
    lw      $t3, 0($t1)                # load from argument array
    daddiu  $t1, $t1, 4
    sw      $t3, 0($v0)                # save to stack
    daddiu  $t2, -4
    bgt     $t2, $zero, 1b             # t2 = number of argument bytes remain
    daddiu  $v0, $v0, 4                # advance dest pointer (branch delay slot)

call_sfn:
    # call method (a0 has been untouched)
    sd      $zero, 0($sp)              # store null for ArtMethod* at bottom of frame
    ld      $t9, ART_METHOD_QUICK_CODE_OFFSET_64($a0)  # get pointer to the code
    jalr    $t9                        # call the method
    nop
    move    $sp, $s8                   # restore sp

    # pop a4, a5, s0(rSUSPEND), s1(rSELF), s8, ra off of the stack
    ld      $a4, 0($sp)
    .cfi_restore 8
    ld      $a5, 8($sp)
    .cfi_restore 9
    ld      $s0, 16($sp)
    .cfi_restore 16
    ld      $s1, 24($sp)
    .cfi_restore 17
    ld      $s8, 32($sp)
    .cfi_restore 30
    ld      $ra, 40($sp)
    .cfi_restore 31
    daddiu  $sp, $sp, 48
    .cfi_adjust_cfa_offset -48

    # a4 = JValue* result
    # a5 = shorty string
    # Integer results arrive in v0, FP results in f0; both are stored as two
    # 32-bit halves so int/long and float/double are all covered.
    lbu     $t1, 0($a5)                # get result type from shorty
    li      $t2, 68                    # put char 'D' into t2
    beq     $t1, $t2, 1f               # branch if result type char == 'D'
    li      $t3, 70                    # put char 'F' into t3 (branch delay slot)
    beq     $t1, $t3, 1f               # branch if result type char == 'F'
    sw      $v0, 0($a4)                # store the result (branch delay slot)
    dsrl    $v1, $v0, 32
    jalr    $zero, $ra
    sw      $v1, 4($a4)                # store the other half of the result
1:
    mfc1    $v0, $f0
    mfhc1   $v1, $f0
    sw      $v0, 0($a4)                # store the result
    jalr    $zero, $ra
    sw      $v1, 4($a4)                # store the other half of the result
END art_quick_invoke_static_stub

    /*
     * Entry from managed code that calls artHandleFillArrayDataFromCode and
     * delivers exception on failure.
 */
    .extern artHandleFillArrayDataFromCode
ENTRY art_quick_handle_fill_data
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case exception allocation triggers GC
    ld      $a2, FRAME_SIZE_SAVE_REFS_ONLY($sp)  # pass referrer's Method*
    jal     artHandleFillArrayDataFromCode       # (payload offset, Array*, method, Thread*)
    move    $a3, rSELF                 # pass Thread::Current (branch delay slot)
    RETURN_IF_ZERO
END art_quick_handle_fill_data

    /*
     * Entry from managed code that calls artLockObjectFromCode, may block for GC.
     */
    .extern artLockObjectFromCode
ENTRY_NO_GP art_quick_lock_object
    # Thin-lock fast path: try to CAS our thread id into the object's lock word
    # via ll/sc; fall back to artLockObjectFromCode on contention, inflation,
    # or recursion-count overflow.
    beqzc   $a0, art_quick_throw_null_pointer_exception
    li      $t8, LOCK_WORD_THIN_LOCK_COUNT_ONE
    li      $t3, LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED
.Lretry_lock:
    lw      $t0, THREAD_ID_OFFSET(rSELF)  # TODO: Can the thread ID really change during the loop?
    ll      $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
    and     $t2, $t1, $t3              # zero the gc bits
    bnezc   $t2, .Lnot_unlocked        # already thin locked
    # Unlocked case - $t1: original lock word that's zero except for the read barrier bits.
    or      $t2, $t1, $t0              # $t2 holds thread id with count of 0 with preserved read barrier bits
    sc      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
    beqzc   $t2, .Lretry_lock          # store failed, retry
    sync                               # full (LoadLoad|LoadStore) memory barrier
    jic     $ra, 0
.Lnot_unlocked:
    # $t1: original lock word, $t0: thread_id with count of 0 and zero read barrier bits
    srl     $t2, $t1, LOCK_WORD_STATE_SHIFT
    bnezc   $t2, .Lslow_lock           # if either of the top two bits are set, go slow path
    xor     $t2, $t1, $t0              # lock_word.ThreadId() ^ self->ThreadId()
    andi    $t2, $t2, 0xFFFF           # zero top 16 bits
    bnezc   $t2, .Lslow_lock           # lock word and self thread id's match -> recursive lock
                                       # otherwise contention, go to slow path
    and     $t2, $t1, $t3              # zero the gc bits
    addu    $t2, $t2, $t8              # increment count in lock word
    srl     $t2, $t2, LOCK_WORD_STATE_SHIFT  # if the first gc state bit is set, we overflowed.
    bnezc   $t2, .Lslow_lock           # if we overflow the count go slow path
    addu    $t2, $t1, $t8              # increment count for real
    sc      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
    beqzc   $t2, .Lretry_lock          # store failed, retry
    nop
    jic     $ra, 0
.Lslow_lock:
    .cpsetup $t9, $t8, art_quick_lock_object
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case we block
    jal     artLockObjectFromCode      # (Object* obj, Thread*)
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    RETURN_IF_ZERO
END art_quick_lock_object

# Same as art_quick_lock_object but always takes the runtime slow path
# (no inlined thin-lock fast path).
ENTRY_NO_GP art_quick_lock_object_no_inline
    beq     $a0, $zero, art_quick_throw_null_pointer_exception
    nop
    .cpsetup $t9, $t8, art_quick_lock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case we block
    jal     artLockObjectFromCode      # (Object* obj, Thread*)
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    RETURN_IF_ZERO
END art_quick_lock_object_no_inline

    /*
     * Entry from managed code that calls artUnlockObjectFromCode and delivers exception on failure.
 */
    .extern artUnlockObjectFromCode
ENTRY_NO_GP art_quick_unlock_object
    # Thin-lock unlock fast path: either clear the lock word (count reached zero)
    # or decrement the recursion count; fall back to the runtime on inflated locks
    # or thread-id mismatch. With read barriers the lock word update must be an
    # atomic ll/sc read-modify-write to avoid losing concurrent gc-bit updates.
    beqzc   $a0, art_quick_throw_null_pointer_exception
    li      $t8, LOCK_WORD_THIN_LOCK_COUNT_ONE
    li      $t3, LOCK_WORD_GC_STATE_MASK_SHIFTED_TOGGLED
.Lretry_unlock:
#ifndef USE_READ_BARRIER
    lw      $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
#else
    ll      $t1, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)  # Need to use atomic read-modify-write for read barrier
#endif
    srl     $t2, $t1, LOCK_WORD_STATE_SHIFT
    bnezc   $t2, .Lslow_unlock         # if either of the top two bits are set, go slow path
    lw      $t0, THREAD_ID_OFFSET(rSELF)
    and     $t2, $t1, $t3              # zero the gc bits
    xor     $t2, $t2, $t0              # lock_word.ThreadId() ^ self->ThreadId()
    andi    $t2, $t2, 0xFFFF           # zero top 16 bits
    bnezc   $t2, .Lslow_unlock         # do lock word and self thread id's match?
    and     $t2, $t1, $t3              # zero the gc bits
    bgeuc   $t2, $t8, .Lrecursive_thin_unlock
    # transition to unlocked
    nor     $t2, $zero, $t3            # $t2 = LOCK_WORD_GC_STATE_MASK_SHIFTED
    and     $t2, $t1, $t2              # $t2: zero except for the preserved gc bits
    sync                               # full (LoadStore|StoreStore) memory barrier
#ifndef USE_READ_BARRIER
    sw      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
#else
    sc      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
    beqzc   $t2, .Lretry_unlock        # store failed, retry
    nop
#endif
    jic     $ra, 0
.Lrecursive_thin_unlock:
    # t1: original lock word
    subu    $t2, $t1, $t8              # decrement count
#ifndef USE_READ_BARRIER
    sw      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
#else
    sc      $t2, MIRROR_OBJECT_LOCK_WORD_OFFSET($a0)
    beqzc   $t2, .Lretry_unlock        # store failed, retry
    nop
#endif
    jic     $ra, 0
.Lslow_unlock:
    .cpsetup $t9, $t8, art_quick_unlock_object
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case exception allocation triggers GC
    jal     artUnlockObjectFromCode    # (Object* obj, Thread*)
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    RETURN_IF_ZERO
END art_quick_unlock_object

# Same as art_quick_unlock_object but always takes the runtime slow path
# (no inlined thin-lock fast path).
ENTRY_NO_GP art_quick_unlock_object_no_inline
    beq     $a0, $zero, art_quick_throw_null_pointer_exception
    nop
    .cpsetup $t9, $t8, art_quick_unlock_object_no_inline
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case exception allocation triggers GC
    jal     artUnlockObjectFromCode    # (Object* obj, Thread*)
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    RETURN_IF_ZERO
END art_quick_unlock_object_no_inline

    /*
     * Entry from managed code that calls artInstanceOfFromCode and delivers exception on failure.
     */
    .extern artInstanceOfFromCode
    .extern artThrowClassCastExceptionForObject
ENTRY art_quick_check_instance_of
    # a0 = object, a1 = target class. Calls artInstanceOfFromCode; on a zero
    # (failed) result, restores the arguments and throws ClassCastException.
    // Type check using the bit string passes null as the target class. In that case just throw.
    beqzc   $a1, .Lthrow_class_cast_exception_for_bitstring_check

    daddiu  $sp, $sp, -32
    .cfi_adjust_cfa_offset 32
    sd      $ra, 24($sp)
    .cfi_rel_offset 31, 24
    sd      $t9, 16($sp)
    sd      $a1, 8($sp)
    sd      $a0, 0($sp)
    jal     artInstanceOfFromCode
    .cpreturn                          # Restore gp from t8 in branch delay slot.
                                       # t8 may be clobbered in artIsAssignableFromCode.
    beq     $v0, $zero, .Lthrow_class_cast_exception
    ld      $ra, 24($sp)               # restore $ra (branch delay slot)
    jalr    $zero, $ra
    daddiu  $sp, $sp, 32               # pop frame (branch delay slot)
    .cfi_adjust_cfa_offset -32

.Lthrow_class_cast_exception:
    ld      $t9, 16($sp)
    ld      $a1, 8($sp)
    ld      $a0, 0($sp)
    daddiu  $sp, $sp, 32
    .cfi_adjust_cfa_offset -32

.Lthrow_class_cast_exception_for_bitstring_check:
    SETUP_GP
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    dla     $t9, artThrowClassCastExceptionForObject
    jalr    $zero, $t9                 # artThrowClassCastException (Object*, Class*, Thread*)
    move    $a2, rSELF                 # pass Thread::Current (branch delay slot)
END art_quick_check_instance_of


    /*
     * Restore rReg's value from offset($sp) if rReg is not the same as rExclude.
     * nReg is the register number for rReg.
 */
.macro POP_REG_NE rReg, nReg, offset, rExclude
    .ifnc \rReg, \rExclude
    ld      \rReg, \offset($sp)        # restore rReg
    .cfi_restore \nReg
    .endif
.endm

    /*
     * Macro to insert read barrier, only used in art_quick_aput_obj.
     * rObj and rDest are registers, offset is a defined literal such as MIRROR_OBJECT_CLASS_OFFSET.
     * TODO: When read barrier has a fast path, add heap unpoisoning support for the fast path.
     */
.macro READ_BARRIER rDest, rObj, offset
#ifdef USE_READ_BARRIER
    # Slow path: spill the live registers of art_quick_aput_obj, call the runtime
    # read barrier, and restore everything except the destination register.
    # saved registers used in art_quick_aput_obj: a0-a2, t0-t1, t9, ra. 16B-aligned.
    daddiu  $sp, $sp, -64
    .cfi_adjust_cfa_offset 64
    sd      $ra, 56($sp)
    .cfi_rel_offset 31, 56
    sd      $t9, 48($sp)
    .cfi_rel_offset 25, 48
    sd      $t1, 40($sp)
    .cfi_rel_offset 13, 40
    sd      $t0, 32($sp)
    .cfi_rel_offset 12, 32
    sd      $a2, 16($sp)               # padding slot at offset 24 (padding can be any slot in the 64B)
    .cfi_rel_offset 6, 16
    sd      $a1, 8($sp)
    .cfi_rel_offset 5, 8
    sd      $a0, 0($sp)
    .cfi_rel_offset 4, 0

    # move $a0, \rRef                  # pass ref in a0 (no-op for now since parameter ref is unused)
    .ifnc \rObj, $a1
    move    $a1, \rObj                 # pass rObj
    .endif
    daddiu  $a2, $zero, \offset        # pass offset
    jal     artReadBarrierSlow         # artReadBarrierSlow(ref, rObj, offset)
    .cpreturn                          # Restore gp from t8 in branch delay slot.
                                       # t8 may be clobbered in artReadBarrierSlow.
    # No need to unpoison return value in v0, artReadBarrierSlow() would do the unpoisoning.
    move    \rDest, $v0                # save return value in rDest
                                       # (rDest cannot be v0 in art_quick_aput_obj)

    ld      $a0, 0($sp)                # restore registers except rDest
                                       # (rDest can only be t0 or t1 in art_quick_aput_obj)
    .cfi_restore 4
    ld      $a1, 8($sp)
    .cfi_restore 5
    ld      $a2, 16($sp)
    .cfi_restore 6
    POP_REG_NE $t0, 12, 32, \rDest
    POP_REG_NE $t1, 13, 40, \rDest
    ld      $t9, 48($sp)
    .cfi_restore 25
    ld      $ra, 56($sp)               # restore $ra
    .cfi_restore 31
    daddiu  $sp, $sp, 64
    .cfi_adjust_cfa_offset -64
    SETUP_GP                           # set up gp because we are not returning
#else
    lwu     \rDest, \offset(\rObj)     # plain heap-reference load when read barriers are off
    UNPOISON_HEAP_REF \rDest
#endif  // USE_READ_BARRIER
.endm

# Object-array store with type check and card mark.
# a0 = array, a1 = index, a2 = value.
ENTRY art_quick_aput_obj
    beq     $a2, $zero, .Ldo_aput_null # storing null needs no check or card mark
    nop
    READ_BARRIER $t0, $a0, MIRROR_OBJECT_CLASS_OFFSET
    READ_BARRIER $t1, $a2, MIRROR_OBJECT_CLASS_OFFSET
    READ_BARRIER $t0, $t0, MIRROR_CLASS_COMPONENT_TYPE_OFFSET
    bne     $t1, $t0, .Lcheck_assignability  # value's type == array's component type - trivial assignability
    nop
.Ldo_aput:
    dsll    $a1, $a1, 2                # scale index by 4 (compressed references)
    daddu   $t0, $a0, $a1
    POISON_HEAP_REF $a2
    sw      $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0)
    # Mark the card for the array: card value is the low byte of the card table base.
    ld      $t0, THREAD_CARD_TABLE_OFFSET(rSELF)
    dsrl    $t1, $a0, CARD_TABLE_CARD_SHIFT
    daddu   $t1, $t1, $t0
    sb      $t0, ($t1)
    jalr    $zero, $ra
    .cpreturn                          # Restore gp from t8 in branch delay slot.
.Ldo_aput_null:
    dsll    $a1, $a1, 2
    daddu   $t0, $a0, $a1
    sw      $a2, MIRROR_OBJECT_ARRAY_DATA_OFFSET($t0)
    jalr    $zero, $ra
    .cpreturn                          # Restore gp from t8 in branch delay slot.
.Lcheck_assignability:
    daddiu  $sp, $sp, -64
    .cfi_adjust_cfa_offset 64
    sd      $ra, 56($sp)
    .cfi_rel_offset 31, 56
    sd      $t9, 24($sp)
    sd      $a2, 16($sp)
    sd      $a1, 8($sp)
    sd      $a0, 0($sp)
    move    $a1, $t1
    move    $a0, $t0
    jal     artIsAssignableFromCode    # (Class*, Class*)
    .cpreturn                          # Restore gp from t8 in branch delay slot.
                                       # t8 may be clobbered in artIsAssignableFromCode.
    ld      $ra, 56($sp)
    ld      $t9, 24($sp)
    ld      $a2, 16($sp)
    ld      $a1, 8($sp)
    ld      $a0, 0($sp)
    daddiu  $sp, $sp, 64
    .cfi_adjust_cfa_offset -64
    SETUP_GP
    bne     $v0, $zero, .Ldo_aput      # assignable -> do the store
    nop
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    move    $a1, $a2
    dla     $t9, artThrowArrayStoreException
    jalr    $zero, $t9                 # artThrowArrayStoreException(Class*, Class*, Thread*)
    move    $a2, rSELF                 # pass Thread::Current (branch delay slot)
END art_quick_aput_obj

// Macros taking opportunity of code similarities for downcalls.
.macro ONE_ARG_REF_DOWNCALL name, entrypoint, return, extend=0
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    dla     $t9, \entrypoint
    jalr    $t9                        # (field_idx, Thread*)
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    .if \extend
    sll     $v0, $v0, 0                # sign-extend 32-bit result
    .endif
    \return                            # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
END \name
.endm

.macro TWO_ARG_REF_DOWNCALL name, entrypoint, return, extend=0
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    dla     $t9, \entrypoint
    jalr    $t9                        # (field_idx, Object*, Thread*) or
                                       # (field_idx, new_val, Thread*)
    move    $a2, rSELF                 # pass Thread::Current (branch delay slot)
    .if \extend
    sll     $v0, $v0, 0                # sign-extend 32-bit result
    .endif
    \return                            # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
END \name
.endm

.macro THREE_ARG_REF_DOWNCALL name, entrypoint, return, extend=0
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    dla     $t9, \entrypoint
    jalr    $t9                        # (field_idx, Object*, new_val, Thread*)
    move    $a3, rSELF                 # pass Thread::Current (branch delay slot)
    .if \extend
    sll     $v0, $v0, 0                # sign-extend 32-bit result
    .endif
    \return                            # RETURN_IF_NO_EXCEPTION or RETURN_IF_ZERO
END \name
.endm

    /*
     * Called by managed code to resolve a static/instance field and load/store a value.
     *
     * Note: Functions `art{Get,Set}<Kind>{Static,Instance}FromCompiledCode` are
     * defined with a macro in runtime/entrypoints/quick/quick_field_entrypoints.cc.
     */
# Getters return the value (RETURN_IF_NO_EXCEPTION); setters return a status
# (RETURN_IF_ZERO). The trailing `1` requests sign-extension of 32-bit results.
ONE_ARG_REF_DOWNCALL art_quick_get_byte_static, artGetByteStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_boolean_static, artGetBooleanStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_short_static, artGetShortStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get_char_static, artGetCharStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get32_static, artGet32StaticFromCompiledCode, RETURN_IF_NO_EXCEPTION, 1
ONE_ARG_REF_DOWNCALL art_quick_get_obj_static, artGetObjStaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
ONE_ARG_REF_DOWNCALL art_quick_get64_static, artGet64StaticFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_byte_instance, artGetByteInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_boolean_instance, artGetBooleanInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_short_instance, artGetShortInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get_char_instance, artGetCharInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get32_instance, artGet32InstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION, 1
TWO_ARG_REF_DOWNCALL art_quick_get_obj_instance, artGetObjInstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_get64_instance, artGet64InstanceFromCompiledCode, RETURN_IF_NO_EXCEPTION
TWO_ARG_REF_DOWNCALL art_quick_set8_static, artSet8StaticFromCompiledCode, RETURN_IF_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set16_static, artSet16StaticFromCompiledCode, RETURN_IF_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set32_static, artSet32StaticFromCompiledCode, RETURN_IF_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set_obj_static, artSetObjStaticFromCompiledCode, RETURN_IF_ZERO
TWO_ARG_REF_DOWNCALL art_quick_set64_static, artSet64StaticFromCompiledCode, RETURN_IF_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set8_instance, artSet8InstanceFromCompiledCode, RETURN_IF_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set16_instance, artSet16InstanceFromCompiledCode, RETURN_IF_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set32_instance, artSet32InstanceFromCompiledCode, RETURN_IF_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set_obj_instance, artSetObjInstanceFromCompiledCode, RETURN_IF_ZERO
THREE_ARG_REF_DOWNCALL art_quick_set64_instance, artSet64InstanceFromCompiledCode, RETURN_IF_ZERO

// Macro to facilitate adding new allocation entrypoints.
.macro ONE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    jal     \entrypoint
    move    $a1, rSELF                 # pass Thread::Current (branch delay slot)
    \return
END \name
.endm

// Macro to facilitate adding new allocation entrypoints.
.macro TWO_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    jal     \entrypoint
    move    $a2, rSELF                 # pass Thread::Current (branch delay slot)
    \return
END \name
.endm

.macro THREE_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    jal     \entrypoint
    move    $a3, rSELF                 # pass Thread::Current (branch delay slot)
    \return
END \name
.endm

.macro FOUR_ARG_DOWNCALL name, entrypoint, return
    .extern \entrypoint
ENTRY \name
    SETUP_SAVE_REFS_ONLY_FRAME         # save callee saves in case of GC
    jal     \entrypoint
    move    $a4, rSELF                 # pass Thread::Current (branch delay slot)
    \return
END \name
.endm

// Generate the allocation entrypoints for each allocator.
GENERATE_ALLOC_ENTRYPOINTS_FOR_NON_TLAB_ALLOCATORS
// Comment out allocators that have mips64 specific asm.
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_region_tlab, RegionTLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_region_tlab, RegionTLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_region_tlab, RegionTLAB)

//
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_WITH_ACCESS_CHECK(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_OBJECT(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED8(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED16(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED32(_tlab, TLAB)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_ARRAY_RESOLVED64(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_BYTES(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_CHARS(_tlab, TLAB)
GENERATE_ALLOC_ENTRYPOINTS_ALLOC_STRING_FROM_STRING(_tlab, TLAB)

// A hand-written override for:
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_RESOLVED(_rosalloc, RosAlloc)
// GENERATE_ALLOC_ENTRYPOINTS_ALLOC_OBJECT_INITIALIZED(_rosalloc, RosAlloc)
.macro ART_QUICK_ALLOC_OBJECT_ROSALLOC c_name, cxx_name, isInitialized
ENTRY_NO_GP \c_name
    # Fast path rosalloc allocation
    # a0: type
    # s1: Thread::Current
    # -----------------------------
    # t1: object size
    # t2: rosalloc run
    # t3: thread stack top offset
    # a4: thread stack bottom offset
    # v0: free list head
    #
    # a5, a6 : temps
    ld      $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1)  # Check if thread local allocation stack
    ld      $a4, THREAD_LOCAL_ALLOC_STACK_END_OFFSET($s1)  # has any room left.
    bgeuc   $t3, $a4, .Lslow_path_\c_name

    lwu     $t1, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0)  # Load object size (t1).
    li      $a5, ROSALLOC_MAX_THREAD_LOCAL_BRACKET_SIZE    # Check if size is for a thread local
                                                           # allocation. Also does the initialized
                                                           # and finalizable checks.
    # When isInitialized == 0, then the class is potentially not yet initialized.
    # If the class is not yet initialized, the object size will be very large to force the branch
    # below to be taken.
    #
    # See InitializeClassVisitors in class-inl.h for more details.
    bltuc   $a5, $t1, .Lslow_path_\c_name

    # Compute the rosalloc bracket index from the size. Since the size is already aligned we can
    # combine the two shifts together.
    dsrl    $t1, $t1, (ROSALLOC_BRACKET_QUANTUM_SIZE_SHIFT - POINTER_SIZE_SHIFT)

    daddu   $t2, $t1, $s1
    ld      $t2, (THREAD_ROSALLOC_RUNS_OFFSET - __SIZEOF_POINTER__)($t2)  # Load rosalloc run (t2).

    # Load the free list head (v0).
    # NOTE: this will be the return val.
    ld      $v0, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)
    beqzc   $v0, .Lslow_path_\c_name

    # Load the next pointer of the head and update the list head with the next pointer.
    ld      $a5, ROSALLOC_SLOT_NEXT_OFFSET($v0)
    sd      $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_HEAD_OFFSET)($t2)

    # Store the class pointer in the header. This also overwrites the first pointer. The offsets are
    # asserted to match.

#if ROSALLOC_SLOT_NEXT_OFFSET != MIRROR_OBJECT_CLASS_OFFSET
#error "Class pointer needs to overwrite next pointer."
#endif

    POISON_HEAP_REF $a0
    sw      $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)

    # Push the new object onto the thread local allocation stack and increment the thread local
    # allocation stack top.
    sw      $v0, 0($t3)
    daddiu  $t3, $t3, COMPRESSED_REFERENCE_SIZE
    sd      $t3, THREAD_LOCAL_ALLOC_STACK_TOP_OFFSET($s1)

    # Decrement the size of the free list.
    lw      $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)
    addiu   $a5, $a5, -1
    sw      $a5, (ROSALLOC_RUN_FREE_LIST_OFFSET + ROSALLOC_RUN_FREE_LIST_SIZE_OFFSET)($t2)

.if \isInitialized == 0
    # This barrier is only necessary when the allocation also requires a class initialization check.
    #
    # If the class is already observably initialized, then new-instance allocations are protected
    # from publishing by the compiler which inserts its own StoreStore barrier.
    sync                               # Fence.
.endif
    jic     $ra, 0

.Lslow_path_\c_name:
    SETUP_GP
    SETUP_SAVE_REFS_ONLY_FRAME
    jal     \cxx_name
    move    $a1, $s1                   # Pass self as argument (branch delay slot).
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \c_name
.endm

ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_resolved_rosalloc, artAllocObjectFromCodeResolvedRosAlloc, /* isInitialized */ 0
ART_QUICK_ALLOC_OBJECT_ROSALLOC art_quick_alloc_object_initialized_rosalloc, artAllocObjectFromCodeInitializedRosAlloc, /* isInitialized */ 1

// The common fast path code for art_quick_alloc_object_resolved/initialized_tlab
// and art_quick_alloc_object_resolved/initialized_region_tlab.
//
// a0: type, s1(rSELF): Thread::Current
// Need to preserve a0 to the slow path.
//
// If isInitialized=1 then the compiler assumes the object's class has already been initialized.
// If isInitialized=0 the compiler can only assume it's been at least resolved.
.macro ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH slowPathLabel isInitialized
    ld      $v0, THREAD_LOCAL_POS_OFFSET(rSELF)    # Load thread_local_pos.
    ld      $a2, THREAD_LOCAL_END_OFFSET(rSELF)    # Load thread_local_end.
    lwu     $t0, MIRROR_CLASS_OBJECT_SIZE_ALLOC_FAST_PATH_OFFSET($a0)  # Load the object size.
    daddu   $a3, $v0, $t0                          # Add object size to tlab pos.

    # When isInitialized == 0, then the class is potentially not yet initialized.
    # If the class is not yet initialized, the object size will be very large to force the branch
    # below to be taken.
    #
    # See InitializeClassVisitors in class-inl.h for more details.
    bltuc   $a2, $a3, \slowPathLabel               # Check if it fits, overflow works since the
                                                   # tlab pos and end are 32 bit values.
    # "Point of no slow path". Won't go to the slow path from here on.
    sd      $a3, THREAD_LOCAL_POS_OFFSET(rSELF)    # Store new thread_local_pos.
    ld      $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)  # Increment thread_local_objects.
    daddiu  $a2, $a2, 1
    sd      $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)
    POISON_HEAP_REF $a0
    sw      $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)   # Store the class pointer.

.if \isInitialized == 0
    # This barrier is only necessary when the allocation also requires a class initialization check.
    #
    # If the class is already observably initialized, then new-instance allocations are protected
    # from publishing by the compiler which inserts its own StoreStore barrier.
    sync                                           # Fence.
.endif
    jic     $ra, 0
.endm

// The common code for art_quick_alloc_object_resolved/initialized_tlab
// and art_quick_alloc_object_resolved/initialized_region_tlab.
.macro GENERATE_ALLOC_OBJECT_TLAB name, entrypoint, isInitialized
ENTRY_NO_GP \name
    # Fast path tlab allocation.
    # a0: type, s1(rSELF): Thread::Current.
    ALLOC_OBJECT_RESOLVED_TLAB_FAST_PATH .Lslow_path_\name, \isInitialized
.Lslow_path_\name:
    SETUP_GP
    SETUP_SAVE_REFS_ONLY_FRAME         # Save callee saves in case of GC.
    jal     \entrypoint                # (mirror::Class*, Thread*)
    move    $a1, rSELF                 # Pass Thread::Current (branch delay slot).
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm

GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_region_tlab, artAllocObjectFromCodeResolvedRegionTLAB, /* isInitialized */ 0
GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_region_tlab, artAllocObjectFromCodeInitializedRegionTLAB, /* isInitialized */ 1
GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_resolved_tlab, artAllocObjectFromCodeResolvedTLAB, /* isInitialized */ 0
GENERATE_ALLOC_OBJECT_TLAB art_quick_alloc_object_initialized_tlab, artAllocObjectFromCodeInitializedTLAB, /* isInitialized */ 1

// The common fast path code for art_quick_alloc_array_resolved/initialized_tlab
// and art_quick_alloc_array_resolved/initialized_region_tlab.
//
// a0: type, a1: component_count, a2: total_size, s1(rSELF): Thread::Current.
// Need to preserve a0 and a1 to the slow path.
.macro ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE slowPathLabel
    dli     $a3, OBJECT_ALIGNMENT_MASK_TOGGLED64   # Apply alignment mask (addr + 7) & ~7.
    and     $a2, $a2, $a3                          # The mask must be 64 bits to keep high
                                                   # bits in case of overflow.
    # Negative sized arrays are handled here since a1 holds a zero extended 32 bit value.
    # Negative ints become large 64 bit unsigned ints which will always be larger than max signed
    # 32 bit int. Since the max shift for arrays is 3, it can not become a negative 64 bit int.
    dli     $a3, MIN_LARGE_OBJECT_THRESHOLD
    bgeuc   $a2, $a3, \slowPathLabel               # Possibly a large object, go slow path.

    ld      $v0, THREAD_LOCAL_POS_OFFSET(rSELF)    # Load thread_local_pos.
    ld      $t1, THREAD_LOCAL_END_OFFSET(rSELF)    # Load thread_local_end.
    dsubu   $t2, $t1, $v0                          # Compute the remaining buffer size.
    bltuc   $t2, $a2, \slowPathLabel               # Check tlab for space, note that we use
                                                   # (end - begin) to handle negative size
                                                   # arrays. It is assumed that a negative size
                                                   # will always be greater unsigned than region
                                                   # size.

    # "Point of no slow path". Won't go to the slow path from here on.
    daddu   $a2, $v0, $a2                          # Add object size to tlab pos.
    sd      $a2, THREAD_LOCAL_POS_OFFSET(rSELF)    # Store new thread_local_pos.
    ld      $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)  # Increment thread_local_objects.
    daddiu  $a2, $a2, 1
    sd      $a2, THREAD_LOCAL_OBJECTS_OFFSET(rSELF)
    POISON_HEAP_REF $a0
    sw      $a0, MIRROR_OBJECT_CLASS_OFFSET($v0)   # Store the class pointer.
    sw      $a1, MIRROR_ARRAY_LENGTH_OFFSET($v0)   # Store the array length.

    jic     $ra, 0
.endm

.macro GENERATE_ALLOC_ARRAY_TLAB name, entrypoint, size_setup
ENTRY_NO_GP \name
    # Fast path array allocation for region tlab allocation.
    # a0: mirror::Class* type
    # a1: int32_t component_count
    # s1(rSELF): Thread::Current
    dext    $a4, $a1, 0, 32            # Create zero-extended component_count. Value
                                       # in a1 is preserved in a case of slow path.
    \size_setup .Lslow_path_\name
    ALLOC_ARRAY_TLAB_FAST_PATH_RESOLVED_WITH_SIZE .Lslow_path_\name
.Lslow_path_\name:
    # a0: mirror::Class* type
    # a1: int32_t component_count
    # a2: Thread* self
    SETUP_GP
    SETUP_SAVE_REFS_ONLY_FRAME         # Save callee saves in case of GC.
    jal     \entrypoint
    move    $a2, rSELF                 # Pass Thread::Current (branch delay slot).
    RETURN_IF_RESULT_IS_NON_ZERO_OR_DELIVER
END \name
.endm

.macro COMPUTE_ARRAY_SIZE_UNKNOWN slow_path
    # Array classes are never finalizable or uninitialized, no need to check.
    lwu     $a3, MIRROR_CLASS_COMPONENT_TYPE_OFFSET($a0)  # Load component type.
    UNPOISON_HEAP_REF $a3
    lw      $a3, MIRROR_CLASS_OBJECT_PRIMITIVE_TYPE_OFFSET($a3)
    dsrl    $a3, $a3, PRIMITIVE_TYPE_SIZE_SHIFT_SHIFT  # Component size shift is in high 16 bits.
    dsllv   $a2, $a4, $a3              # Calculate data size.
    # Add array data offset and alignment.
    daddiu  $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
#if MIRROR_WIDE_ARRAY_DATA_OFFSET != MIRROR_INT_ARRAY_DATA_OFFSET + 4
#error Long array data offset must be 4 greater than int array data offset.
#endif

    daddiu  $a3, $a3, 1                # Add 4 to the length only if the component
    andi    $a3, $a3, 4                # size shift is 3 (for 64 bit alignment).
    daddu   $a2, $a2, $a3
.endm

.macro COMPUTE_ARRAY_SIZE_8 slow_path
    # Add array data offset and alignment.
    daddiu  $a2, $a4, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
.endm

.macro COMPUTE_ARRAY_SIZE_16 slow_path
    dsll    $a2, $a4, 1                # data size = component_count * 2
    # Add array data offset and alignment.
    daddiu  $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
.endm

.macro COMPUTE_ARRAY_SIZE_32 slow_path
    dsll    $a2, $a4, 2                # data size = component_count * 4
    # Add array data offset and alignment.
    daddiu  $a2, $a2, (MIRROR_INT_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
.endm

.macro COMPUTE_ARRAY_SIZE_64 slow_path
    dsll    $a2, $a4, 3                # data size = component_count * 8
    # Add array data offset and alignment.
    daddiu  $a2, $a2, (MIRROR_WIDE_ARRAY_DATA_OFFSET + OBJECT_ALIGNMENT_MASK)
.endm

GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_region_tlab, artAllocArrayFromCodeResolvedRegionTLAB, COMPUTE_ARRAY_SIZE_64

GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_UNKNOWN
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved8_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_8
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved16_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_16
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved32_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_32
GENERATE_ALLOC_ARRAY_TLAB art_quick_alloc_array_resolved64_tlab, artAllocArrayFromCodeResolvedTLAB, COMPUTE_ARRAY_SIZE_64

    /*
     * Macro for resolution and initialization of indexed DEX file
     * constants such as classes and strings. $a0 is both input and
     * output. On success only $a0 is clobbered; everything else is
     * restored from the save-everything frame before returning.
     */
.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL name, entrypoint, runtime_method_offset = RUNTIME_SAVE_EVERYTHING_METHOD_OFFSET
    .extern \entrypoint
ENTRY_NO_GP \name
    SETUP_SAVE_EVERYTHING_FRAME \runtime_method_offset    # Save everything in case of GC.
    dla     $t9, \entrypoint
    jalr    $t9                                           # (uint32_t index, Thread*)
    move    $a1, rSELF                                    # Pass Thread::Current (in delay slot).
    beqz    $v0, 1f                                       # Success?
    move    $a0, $v0                                      # Move result to $a0 (in delay slot).
    RESTORE_SAVE_EVERYTHING_FRAME 0                       # Restore everything except $a0.
    jic     $ra, 0                                        # Return on success.
1:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
END \name
.endm

# Variant of the above that uses the save-everything-for-clinit runtime method.
.macro ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT name, entrypoint
    ONE_ARG_SAVE_EVERYTHING_DOWNCALL \name, \entrypoint, RUNTIME_SAVE_EVERYTHING_FOR_CLINIT_METHOD_OFFSET
.endm

    /*
     * Entry from managed code to resolve a method handle. On entry, A0 holds the method handle
     * index. On success the MethodHandle is returned, otherwise an exception is raised.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_handle, artResolveMethodHandleFromCode

    /*
     * Entry from managed code to resolve a method type. On entry, A0 holds the method type index.
     * On success the MethodType is returned, otherwise an exception is raised.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_method_type, artResolveMethodTypeFromCode

    /*
     * Entry from managed code to resolve a string, this stub will allocate a String and deliver an
     * exception on error. On success the String is returned. A0 holds the string index. The fast
     * path check for hit in strings cache has already been performed.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_string, artResolveStringFromCode

    /*
     * Entry from managed code when uninitialized static storage, this stub will run the class
     * initializer and deliver the exception on error. On success the static storage base is
     * returned.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_initialize_static_storage, artInitializeStaticStorageFromCode

    /*
     * Entry from managed code when dex cache misses for a type_idx.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL_FOR_CLINIT art_quick_resolve_type, artResolveTypeFromCode

    /*
     * Entry from managed code when type_idx needs to be checked for access and dex cache may also
     * miss.
     */
ONE_ARG_SAVE_EVERYTHING_DOWNCALL art_quick_resolve_type_and_verify_access, artResolveTypeAndVerifyAccessFromCode

    /*
     * Called by managed code when the value in rSUSPEND has been decremented to 0.
     */
    .extern artTestSuspendFromCode
ENTRY_NO_GP art_quick_test_suspend
    SETUP_SAVE_EVERYTHING_FRAME RUNTIME_SAVE_EVERYTHING_FOR_SUSPEND_CHECK_METHOD_OFFSET
                                                          # save everything for stack crawl
    jal     artTestSuspendFromCode                        # (Thread*)
    move    $a0, rSELF
    RESTORE_SAVE_EVERYTHING_FRAME
    jalr    $zero, $ra
    nop
END art_quick_test_suspend

    /*
     * Called by managed code that is attempting to call a method on a proxy class. On entry
     * $a0 holds the proxy method; $a1, $a2 and $a3 may contain arguments.
     */
    .extern artQuickProxyInvokeHandler
ENTRY art_quick_proxy_invoke_handler
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
    move    $a2, rSELF                                    # pass Thread::Current
    jal     artQuickProxyInvokeHandler                    # (Method* proxy method, receiver, Thread*, SP)
    move    $a3, $sp                                      # pass $sp
    ld      $t0, THREAD_EXCEPTION_OFFSET(rSELF)           # load Thread::Current()->exception_
    daddiu  $sp, $sp, REFS_AND_ARGS_MINUS_REFS_SIZE       # skip a0-a7 and f12-f19
    RESTORE_SAVE_REFS_ONLY_FRAME
    bne     $t0, $zero, 1f
    dmtc1   $v0, $f0                                      # place return value to FP return value
    jalr    $zero, $ra
    dmtc1   $v1, $f1                                      # place return value to FP return value
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_proxy_invoke_handler

    /*
     * Called to resolve an imt conflict.
     * a0 is the conflict ArtMethod.
     * t0 is a hidden argument that holds the target interface method's dex method index.
     *
     * Note that this stub writes to v0-v1, a0, t0-t3, t8-t9, f0-f11, f20-f23.
     */
    .extern artLookupResolvedMethod
    .extern __atomic_load_16                              # For __int128_t std::atomic::load(std::memory_order).
ENTRY art_quick_imt_conflict_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME_INTERNAL /* save_s4_thru_s8 */ 0

    ld      $t1, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp)       # $t1 = referrer.
    // If the method is obsolete, just go through the dex cache miss slow path.
    // The obsolete flag is set with suspended threads, so we do not need an acquire operation here.
    lw      $t9, ART_METHOD_ACCESS_FLAGS_OFFSET($t1)      # $t9 = access flags.
    sll     $t9, $t9, 31 - ACC_OBSOLETE_METHOD_SHIFT      # Move obsolete method bit to sign bit.
    bltzc   $t9, .Limt_conflict_trampoline_dex_cache_miss
    lwu     $t1, ART_METHOD_DECLARING_CLASS_OFFSET($t1)   # $t1 = declaring class (no read barrier).
    lwu     $t1, MIRROR_CLASS_DEX_CACHE_OFFSET($t1)       # $t1 = dex cache (without read barrier).
    UNPOISON_HEAP_REF $t1
    dla     $t9, __atomic_load_16
    ld      $t1, MIRROR_DEX_CACHE_RESOLVED_METHODS_OFFSET($t1)  # $t1 = dex cache methods array.

    dext    $s2, $t0, 0, 32                               # $s2 = zero-extended method index
                                                          # (callee-saved).
    ld      $s3, ART_METHOD_JNI_OFFSET_64($a0)            # $s3 = ImtConflictTable (callee-saved).

    dext    $t0, $t0, 0, METHOD_DEX_CACHE_HASH_BITS       # $t0 = slot index.

    li      $a1, STD_MEMORY_ORDER_RELAXED                 # $a1 = std::memory_order_relaxed.
    jalr    $t9                                           # [$v0, $v1] = __atomic_load_16($a0, $a1).
    dlsa    $a0, $t0, $t1, POINTER_SIZE_SHIFT + 1         # $a0 = DexCache method slot address.

    bnec    $v1, $s2, .Limt_conflict_trampoline_dex_cache_miss  # Branch if method index miss.

.Limt_table_iterate:
    ld      $t1, 0($s3)                                   # Load next entry in ImtConflictTable.
    # Branch if found.
    beq     $t1, $v0, .Limt_table_found
    nop
    # If the entry is null, the interface method is not in the ImtConflictTable.
    beqzc   $t1, .Lconflict_trampoline
    # Iterate over the entries of the ImtConflictTable.
    daddiu  $s3, $s3, 2 * __SIZEOF_POINTER__              # Iterate to the next entry.
    bc      .Limt_table_iterate

.Limt_table_found:
    # We successfully hit an entry in the table. Load the target method and jump to it.
    .cfi_remember_state
    ld      $a0, __SIZEOF_POINTER__($s3)
    ld      $t9, ART_METHOD_QUICK_CODE_OFFSET_64($a0)
    RESTORE_SAVE_REFS_AND_ARGS_FRAME /* restore_s4_thru_s8 */ 0
    jic     $t9, 0
    .cfi_restore_state

.Lconflict_trampoline:
    # Call the runtime stub to populate the ImtConflictTable and jump to the resolved method.
    .cfi_remember_state
    RESTORE_SAVE_REFS_AND_ARGS_FRAME_A1                   # Restore this.
    move    $a0, $v0                                      # Load interface method.
    INVOKE_TRAMPOLINE_BODY artInvokeInterfaceTrampoline, /* save_s4_thru_s8_only */ 1
    .cfi_restore_state

.Limt_conflict_trampoline_dex_cache_miss:
    # We're not creating a proper runtime method frame here,
    # artLookupResolvedMethod() is not allowed to walk the stack.
    dla     $t9, artLookupResolvedMethod
    ld      $a1, FRAME_SIZE_SAVE_REFS_AND_ARGS($sp)       # $a1 = referrer.
    jalr    $t9                                           # (uint32_t method_index, ArtMethod* referrer).
    sll     $a0, $s2, 0                                   # $a0 = sign-extended method index.

    # If the method wasn't resolved, skip the lookup and go to artInvokeInterfaceTrampoline().
    beqzc   $v0, .Lconflict_trampoline
    nop
    bc      .Limt_table_iterate
END art_quick_imt_conflict_trampoline

    .extern artQuickResolutionTrampoline
ENTRY art_quick_resolution_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME
    move    $a2, rSELF                                    # pass Thread::Current
    jal     artQuickResolutionTrampoline                  # (Method* called, receiver, Thread*, SP)
    move    $a3, $sp                                      # pass $sp
    beq     $v0, $zero, 1f
    ld      $a0, 0($sp)                                   # load resolved method in $a0
                                                          # artQuickResolutionTrampoline puts resolved method in *SP
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    move    $t9, $v0                                      # code pointer must be in $t9 to generate the global pointer
    jalr    $zero, $t9                                    # tail call to method
    nop
1:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_resolution_trampoline

    .extern artQuickGenericJniTrampoline
    .extern artQuickGenericJniEndTrampoline
ENTRY art_quick_generic_jni_trampoline
    SETUP_SAVE_REFS_AND_ARGS_FRAME_WITH_METHOD_IN_A0
    move    $s8, $sp                                      # save $sp

    # prepare for call to artQuickGenericJniTrampoline(Thread*, SP)
    move    $a0, rSELF                                    # pass Thread::Current
    move    $a1, $sp                                      # pass $sp
    jal     artQuickGenericJniTrampoline                  # (Thread*, SP)
    daddiu  $sp, $sp, -5120                               # reserve space on the stack

    # The C call will have registered the complete save-frame on success.
    # The result of the call is:
    # v0: ptr to native code, 0 on error.
    # v1: ptr to the bottom of the used area of the alloca, can restore stack till here.
    beq     $v0, $zero, 1f                                # check entry error
    move    $t9, $v0                                      # save the code ptr
    move    $sp, $v1                                      # release part of the alloca

    # Load parameters from stack into registers
    ld      $a0, 0($sp)
    ld      $a1, 8($sp)
    ld      $a2, 16($sp)
    ld      $a3, 24($sp)
    ld      $a4, 32($sp)
    ld      $a5, 40($sp)
    ld      $a6, 48($sp)
    ld      $a7, 56($sp)
    # Load FPRs the same as GPRs. Look at BuildNativeCallFrameStateMachine.
    l.d     $f12, 0($sp)
    l.d     $f13, 8($sp)
    l.d     $f14, 16($sp)
    l.d     $f15, 24($sp)
    l.d     $f16, 32($sp)
    l.d     $f17, 40($sp)
    l.d     $f18, 48($sp)
    l.d     $f19, 56($sp)
    jalr    $t9                                           # native call
    daddiu  $sp, $sp, 64                                  # pop the 8 argument slots (in delay slot)

    # result sign extension is handled in C code
    # prepare for call to artQuickGenericJniEndTrampoline(Thread*, result, result_f)
    move    $a0, rSELF                                    # pass Thread::Current
    move    $a1, $v0
    jal     artQuickGenericJniEndTrampoline
    dmfc1   $a2, $f0                                      # pass FP result (in delay slot)

    ld      $t0, THREAD_EXCEPTION_OFFSET(rSELF)           # load Thread::Current()->exception_
    bne     $t0, $zero, 1f                                # check for pending exceptions
    move    $sp, $s8                                      # tear down the alloca

    # tear down the callee-save frame
    RESTORE_SAVE_REFS_AND_ARGS_FRAME

    jalr    $zero, $ra
    dmtc1   $v0, $f0                                      # place return value to FP return value

1:
    ld      $t0, THREAD_TOP_QUICK_FRAME_OFFSET(rSELF)
    daddiu  $sp, $t0, -1                                  // Remove the GenericJNI tag.
    # This will create a new save-all frame, required by the runtime.
    DELIVER_PENDING_EXCEPTION
END art_quick_generic_jni_trampoline

    .extern artQuickToInterpreterBridge
ENTRY art_quick_to_interpreter_bridge
    SETUP_SAVE_REFS_AND_ARGS_FRAME
    move    $a1, rSELF                                    # pass Thread::Current
    jal     artQuickToInterpreterBridge                   # (Method* method, Thread*, SP)
    move    $a2, $sp                                      # pass $sp
    ld      $t0, THREAD_EXCEPTION_OFFSET(rSELF)           # load Thread::Current()->exception_
    daddiu  $sp, $sp, REFS_AND_ARGS_MINUS_REFS_SIZE       # skip a0-a7 and f12-f19
    RESTORE_SAVE_REFS_ONLY_FRAME
    bne     $t0, $zero, 1f
    dmtc1   $v0, $f0                                      # place return value to FP return value
    jalr    $zero, $ra
    dmtc1   $v1, $f1                                      # place return value to FP return value
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_to_interpreter_bridge

    .extern artInvokeObsoleteMethod
ENTRY art_invoke_obsolete_method_stub
    SETUP_SAVE_ALL_CALLEE_SAVES_FRAME
    jal     artInvokeObsoleteMethod                       # (Method* method, Thread* self)
    move    $a1, rSELF                                    # pass Thread::Current
END art_invoke_obsolete_method_stub

    /*
     * Routine that intercepts method calls and returns.
     */
    .extern artInstrumentationMethodEntryFromCode
    .extern artInstrumentationMethodExitFromCode
ENTRY art_quick_instrumentation_entry
    SETUP_SAVE_REFS_AND_ARGS_FRAME
    # Preserve $a0 knowing there is a spare slot in kSaveRefsAndArgs.
    sd      $a0, 8($sp)                                   # Save arg0.
    move    $a3, $sp                                      # Pass $sp.
    jal     artInstrumentationMethodEntryFromCode         # (Method*, Object*, Thread*, SP)
    move    $a2, rSELF                                    # pass Thread::Current
    beqzc   $v0, .Ldeliver_instrumentation_entry_exception
                                                          # Deliver exception if we got nullptr as function.
    move    $t9, $v0                                      # $t9 holds reference to code
    ld      $a0, 8($sp)                                   # Restore arg0.
    dla     $v0, art_quick_instrumentation_exit
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    move    $ra, $v0
    jic     $t9, 0                                        # call method, returning to art_quick_instrumentation_exit
.Ldeliver_instrumentation_entry_exception:
    RESTORE_SAVE_REFS_AND_ARGS_FRAME
    DELIVER_PENDING_EXCEPTION
END art_quick_instrumentation_entry

ENTRY_NO_GP art_quick_instrumentation_exit
    move    $ra, $zero                                    # RA points here, so clobber with 0 for later checks.
    SETUP_SAVE_EVERYTHING_FRAME

    daddiu  $a3, $sp, 16                                  # Pass fpr_res pointer ($f0 in SAVE_EVERYTHING_FRAME).
    daddiu  $a2, $sp, 280                                 # Pass gpr_res pointer ($v0 in SAVE_EVERYTHING_FRAME).
    move    $a1, $sp                                      # Pass $sp.
    jal     artInstrumentationMethodExitFromCode          # (Thread*, SP, gpr_res*, fpr_res*)
    move    $a0, rSELF                                    # pass Thread::Current

    beqzc   $v0, .Ldo_deliver_instrumentation_exception
                                                          # Deliver exception if we got nullptr as function.
    nop
    bnez    $v1, .Ldeoptimize

    # Normal return.
    sd      $v0, (FRAME_SIZE_SAVE_EVERYTHING-8)($sp)      # Set return pc.
    RESTORE_SAVE_EVERYTHING_FRAME
    jic     $ra, 0
.Ldo_deliver_instrumentation_exception:
    DELIVER_PENDING_EXCEPTION_FRAME_READY
.Ldeoptimize:
    b       art_quick_deoptimize
    sd      $v1, (FRAME_SIZE_SAVE_EVERYTHING-8)($sp)
                                                          # Fake a call from instrumentation return pc.
END art_quick_instrumentation_exit

    /*
     * Instrumentation has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimize
ENTRY_NO_GP_CUSTOM_CFA art_quick_deoptimize, FRAME_SIZE_SAVE_EVERYTHING
    # SETUP_SAVE_EVERYTHING_FRAME has been done by art_quick_instrumentation_exit.
    # Replay its CFI register/slot mapping so unwinding through here works.
    .cfi_rel_offset 31, 488
    .cfi_rel_offset 30, 480
    .cfi_rel_offset 28, 472
    .cfi_rel_offset 25, 464
    .cfi_rel_offset 24, 456
    .cfi_rel_offset 23, 448
    .cfi_rel_offset 22, 440
    .cfi_rel_offset 21, 432
    .cfi_rel_offset 20, 424
    .cfi_rel_offset 19, 416
    .cfi_rel_offset 18, 408
    .cfi_rel_offset 17, 400
    .cfi_rel_offset 16, 392
    .cfi_rel_offset 15, 384
    .cfi_rel_offset 14, 376
    .cfi_rel_offset 13, 368
    .cfi_rel_offset 12, 360
    .cfi_rel_offset 11, 352
    .cfi_rel_offset 10, 344
    .cfi_rel_offset 9, 336
    .cfi_rel_offset 8, 328
    .cfi_rel_offset 7, 320
    .cfi_rel_offset 6, 312
    .cfi_rel_offset 5, 304
    .cfi_rel_offset 4, 296
    .cfi_rel_offset 3, 288
    .cfi_rel_offset 2, 280
    .cfi_rel_offset 1, 272

    jal     artDeoptimize                                 # artDeoptimize(Thread*)
    move    $a0, rSELF                                    # pass Thread::current
    break                                                 # Unreachable: artDeoptimize never returns.
END art_quick_deoptimize

    /*
     * Compiled code has requested that we deoptimize into the interpreter. The deoptimization
     * will long jump to the upcall with a special exception of -1.
     */
    .extern artDeoptimizeFromCompiledCode
ENTRY_NO_GP art_quick_deoptimize_from_compiled_code
    SETUP_SAVE_EVERYTHING_FRAME
    jal     artDeoptimizeFromCompiledCode                 # (DeoptimizationKind, Thread*)
    move    $a1, rSELF                                    # pass Thread::current
END art_quick_deoptimize_from_compiled_code

    .set push
    .set noat
/* java.lang.String.compareTo(String anotherString) */
ENTRY_NO_GP art_quick_string_compareto
/* $a0 holds address of "this" */
/* $a1 holds address of "anotherString" */
    move    $a2, $zero
    beq     $a0, $a1, .Lstring_compareto_length_diff      # this and anotherString are the same object
    move    $a3, $zero                                    # return 0 (it returns a2 - a3)

#if (STRING_COMPRESSION_FEATURE)
    lw      $a4, MIRROR_STRING_COUNT_OFFSET($a0)          # 'count' field of this
    lw      $a5, MIRROR_STRING_COUNT_OFFSET($a1)          # 'count' field of anotherString
    sra     $a2, $a4, 1                                   # this.length()
    sra     $a3, $a5, 1                                   # anotherString.length()
#else
    lw      $a2, MIRROR_STRING_COUNT_OFFSET($a0)          # this.length()
    lw      $a3, MIRROR_STRING_COUNT_OFFSET($a1)          # anotherString.length()
#endif

    MINu    $t2, $a2, $a3
    # $t2 now holds min(this.length(),anotherString.length())

    # while min(this.length(),anotherString.length())-i != 0
    beqzc   $t2, .Lstring_compareto_length_diff           # if $t2==0
                                                          # return (this.length() - anotherString.length())

#if (STRING_COMPRESSION_FEATURE)
    # Differ cases:
    dext    $a6, $a4, 0, 1                                # Extract this's compression flag.
    beqz    $a6, .Lstring_compareto_this_is_compressed
    dext    $a6, $a5, 0, 1                                # In branch delay slot.
    beqz    $a6, .Lstring_compareto_that_is_compressed
    nop
    b       .Lstring_compareto_both_not_compressed
    nop

.Lstring_compareto_this_is_compressed:
    beqzc   $a6, .Lstring_compareto_both_compressed
    /* If (this->IsCompressed() && that->IsCompressed() == false) */
.Lstring_compareto_loop_comparison_this_compressed:
    lbu     $t0, MIRROR_STRING_VALUE_OFFSET($a0)
    lhu     $t1, MIRROR_STRING_VALUE_OFFSET($a1)
    bnec    $t0, $t1, .Lstring_compareto_char_diff
    daddiu  $a0, $a0, 1                                   # point at this.charAt(i++) - compressed
    subu    $t2, $t2, 1                                   # new value of min(this.length(),anotherString.length())-i
    bnez    $t2, .Lstring_compareto_loop_comparison_this_compressed
    daddiu  $a1, $a1, 2                                   # point at anotherString.charAt(i++) - uncompressed
    jalr    $zero, $ra
    subu    $v0, $a2, $a3                                 # return (this.length() - anotherString.length())

.Lstring_compareto_that_is_compressed:
    lhu     $t0, MIRROR_STRING_VALUE_OFFSET($a0)
    lbu     $t1, MIRROR_STRING_VALUE_OFFSET($a1)
    bnec    $t0, $t1, .Lstring_compareto_char_diff
    daddiu  $a0, $a0, 2                                   # point at this.charAt(i++) - uncompressed
    subu    $t2, $t2, 1                                   # new value of min(this.length(),anotherString.length())-i
    bnez    $t2, .Lstring_compareto_that_is_compressed
    daddiu  $a1, $a1, 1                                   # point at anotherString.charAt(i++) - compressed
    jalr    $zero, $ra
    subu    $v0, $a2, $a3                                 # return (this.length() - anotherString.length())

.Lstring_compareto_both_compressed:
    lbu     $t0, MIRROR_STRING_VALUE_OFFSET($a0)
    lbu     $t1, MIRROR_STRING_VALUE_OFFSET($a1)
    bnec    $t0, $t1, .Lstring_compareto_char_diff
    daddiu  $a0, $a0, 1                                   # point at this.charAt(i++) - compressed
    subu    $t2, $t2, 1                                   # new value of min(this.length(),anotherString.length())-i
    bnez    $t2, .Lstring_compareto_both_compressed
    daddiu  $a1, $a1, 1                                   # point at anotherString.charAt(i++) - compressed
    jalr    $zero, $ra
    subu    $v0, $a2, $a3                                 # return (this.length() - anotherString.length())
#endif

.Lstring_compareto_both_not_compressed:
    lhu     $t0, MIRROR_STRING_VALUE_OFFSET($a0)          # while this.charAt(i) == anotherString.charAt(i)
    lhu     $t1, MIRROR_STRING_VALUE_OFFSET($a1)
    bnec    $t0, $t1, .Lstring_compareto_char_diff        # if this.charAt(i) != anotherString.charAt(i)
                                                          # return (this.charAt(i) - anotherString.charAt(i))
    daddiu  $a0, $a0, 2                                   # point at this.charAt(i++)
    subu    $t2, $t2, 1                                   # new value of min(this.length(),anotherString.length())-i
    bnez    $t2, .Lstring_compareto_both_not_compressed
    daddiu  $a1, $a1, 2                                   # point at anotherString.charAt(i++)

.Lstring_compareto_length_diff:
    jalr    $zero, $ra
    subu    $v0, $a2, $a3                                 # return (this.length() - anotherString.length())

.Lstring_compareto_char_diff:
    jalr    $zero, $ra
    subu    $v0, $t0, $t1                                 # return (this.charAt(i) - anotherString.charAt(i))
END art_quick_string_compareto

/* java.lang.String.indexOf(int ch, int fromIndex=0) */
ENTRY_NO_GP art_quick_indexof
/* $a0 holds address of "this" */
/* $a1 holds "ch" */
/* $a2 holds "fromIndex" */
#if (STRING_COMPRESSION_FEATURE)
    lw      $a3, MIRROR_STRING_COUNT_OFFSET($a0)          # 'count' field of this
#else
    lw      $t0, MIRROR_STRING_COUNT_OFFSET($a0)          # this.length()
#endif
    slt     $at, $a2, $zero                               # if fromIndex < 0
    seleqz  $a2, $a2, $at                                 #     fromIndex = 0;
#if (STRING_COMPRESSION_FEATURE)
    srl     $t0, $a3, 1                                   # $a3 holds count (with flag) and $t0 holds actual length
#endif
    subu    $t0, $t0, $a2                                 # this.length() - fromIndex
    blez    $t0, 6f                                       # if this.length()-fromIndex <= 0
    li      $v0, -1                                       #     return -1;

#if (STRING_COMPRESSION_FEATURE)
    dext    $a3, $a3, 0, 1                                # Extract compression flag.
    beqzc   $a3, .Lstring_indexof_compressed
#endif

    sll     $v0, $a2, 1                                   # $a0 += $a2 * 2
    daddu   $a0, $a0, $v0                                 #  "  ditto  "
    move    $v0, $a2                                      # Set i to fromIndex.
1:
    lhu     $t3, MIRROR_STRING_VALUE_OFFSET($a0)          # if this.charAt(i) == ch
    beq     $t3, $a1, 6f                                  #     return i;
    daddu   $a0, $a0, 2                                   # i++ (in delay slot)
    subu    $t0, $t0, 1                                   # this.length() - i
    bnez    $t0, 1b                                       # while this.length() - i > 0
    addu    $v0, $v0, 1                                   # i++

    li      $v0, -1                                       # if this.length() - i <= 0
                                                          #     return -1;

6:
    j       $ra
    nop

#if (STRING_COMPRESSION_FEATURE)
.Lstring_indexof_compressed:
    move    $a4, $a0                                      # Save a copy in $a4 to later compute result.
    daddu   $a0, $a0, $a2                                 # $a0 += $a2

.Lstring_indexof_compressed_loop:
    lbu     $t3, MIRROR_STRING_VALUE_OFFSET($a0)
    beq     $t3, $a1, .Lstring_indexof_compressed_matched
    subu    $t0, $t0, 1
    bgtz    $t0, .Lstring_indexof_compressed_loop
    daddu   $a0, $a0, 1

.Lstring_indexof_nomatch:
    jalr    $zero, $ra
    li      $v0, -1                                       # return -1;

.Lstring_indexof_compressed_matched:
    jalr    $zero, $ra
    dsubu   $v0, $a0, $a4                                 # return (current - start);
#endif
END art_quick_indexof

    /*
     * Create a function `name` calling the ReadBarrier::Mark routine,
     * getting its argument and returning its result through register
     * `reg`, saving and restoring all caller-save registers.
     */
.macro READ_BARRIER_MARK_REG name, reg
ENTRY \name
    // Null check so that we can load the lock word.
    bnezc   \reg, .Lnot_null_\name
    nop
.Lret_rb_\name:
    jic     $ra, 0
.Lnot_null_\name:
    // Check lock word for mark bit, if marked return.
    lw      $t9, MIRROR_OBJECT_LOCK_WORD_OFFSET(\reg)
    .set push
    .set noat
    sll     $at, $t9, 31 - LOCK_WORD_MARK_BIT_SHIFT       # Move mark bit to sign bit.
    bltzc   $at, .Lret_rb_\name
#if (LOCK_WORD_STATE_SHIFT != 30) || (LOCK_WORD_STATE_FORWARDING_ADDRESS != 3)
    // The below code depends on the lock word state being in the highest bits
    // and the "forwarding address" state having all bits set.
#error "Unexpected lock word state shift or forwarding address state value."
#endif
    // Test that both the forwarding state bits are 1.
    sll     $at, $t9, 1
    and     $at, $at, $t9                                 # Sign bit = 1 IFF both bits are 1.
    bltzc   $at, .Lret_forwarding_address\name
    .set pop

    # Slow path: spill all caller-save GPRs and FPRs, call artReadBarrierMark.
    daddiu  $sp, $sp, -320
    .cfi_adjust_cfa_offset 320

    sd      $ra, 312($sp)
    .cfi_rel_offset 31, 312
    sd      $t8, 304($sp)                                 # save t8 holding caller's gp
    .cfi_rel_offset 24, 304
    sd      $t3, 296($sp)
    .cfi_rel_offset 15, 296
    sd      $t2, 288($sp)
    .cfi_rel_offset 14, 288
    sd      $t1, 280($sp)
    .cfi_rel_offset 13, 280
    sd      $t0, 272($sp)
    .cfi_rel_offset 12, 272
    sd      $a7, 264($sp)
    .cfi_rel_offset 11, 264
    sd      $a6, 256($sp)
    .cfi_rel_offset 10, 256
    sd      $a5, 248($sp)
    .cfi_rel_offset 9, 248
    sd      $a4, 240($sp)
    .cfi_rel_offset 8, 240
    sd      $a3, 232($sp)
    .cfi_rel_offset 7, 232
    sd      $a2, 224($sp)
    .cfi_rel_offset 6, 224
    sd      $a1, 216($sp)
    .cfi_rel_offset 5, 216
    sd      $a0, 208($sp)
    .cfi_rel_offset 4, 208
    sd      $v1, 200($sp)
    .cfi_rel_offset 3, 200
    sd      $v0, 192($sp)
    .cfi_rel_offset 2, 192

    dla     $t9, artReadBarrierMark

    sdc1    $f23, 184($sp)
    sdc1    $f22, 176($sp)
    sdc1    $f21, 168($sp)
    sdc1    $f20, 160($sp)
    sdc1    $f19, 152($sp)
    sdc1    $f18, 144($sp)
    sdc1    $f17, 136($sp)
    sdc1    $f16, 128($sp)
    sdc1    $f15, 120($sp)
    sdc1    $f14, 112($sp)
    sdc1    $f13, 104($sp)
    sdc1    $f12, 96($sp)
    sdc1    $f11, 88($sp)
    sdc1    $f10, 80($sp)
    sdc1    $f9, 72($sp)
    sdc1    $f8, 64($sp)
    sdc1    $f7, 56($sp)
    sdc1    $f6, 48($sp)
    sdc1    $f5, 40($sp)
    sdc1    $f4, 32($sp)
    sdc1    $f3, 24($sp)
    sdc1    $f2, 16($sp)
    sdc1    $f1, 8($sp)

    .ifnc \reg, $a0
    move    $a0, \reg                                     # pass obj from `reg` in a0
    .endif
    jalr    $t9                                           # v0 <- artReadBarrierMark(obj)
    sdc1    $f0, 0($sp)                                   # in delay slot

    ld      $ra, 312($sp)
    .cfi_restore 31
    ld      $t8, 304($sp)                                 # restore t8 holding caller's gp
    .cfi_restore 24
    ld      $t3, 296($sp)
    .cfi_restore 15
    ld      $t2, 288($sp)
    .cfi_restore 14
    ld      $t1, 280($sp)
    .cfi_restore 13
    ld      $t0, 272($sp)
    .cfi_restore 12
    ld      $a7, 264($sp)
    .cfi_restore 11
    ld      $a6, 256($sp)
    .cfi_restore 10
    ld      $a5, 248($sp)
    .cfi_restore 9
    ld      $a4, 240($sp)
    .cfi_restore 8
    ld      $a3, 232($sp)
    .cfi_restore 7
    ld      $a2, 224($sp)
    .cfi_restore 6
    ld      $a1, 216($sp)
    .cfi_restore 5
    ld      $a0, 208($sp)
    .cfi_restore 4
    ld      $v1, 200($sp)
    .cfi_restore 3

    .ifnc \reg, $v0
    move    \reg, $v0                                     # `reg` <- v0
    ld      $v0, 192($sp)
    .cfi_restore 2
    .endif

    ldc1    $f23, 184($sp)
    ldc1    $f22, 176($sp)
    ldc1    $f21, 168($sp)
    ldc1    $f20, 160($sp)
    ldc1    $f19, 152($sp)
    ldc1    $f18, 144($sp)
    ldc1    $f17, 136($sp)
    ldc1    $f16, 128($sp)
    ldc1    $f15, 120($sp)
    ldc1    $f14, 112($sp)
    ldc1    $f13, 104($sp)
    ldc1    $f12, 96($sp)
    ldc1    $f11, 88($sp)
    ldc1    $f10, 80($sp)
    ldc1    $f9, 72($sp)
    ldc1    $f8, 64($sp)
    ldc1    $f7, 56($sp)
    ldc1    $f6, 48($sp)
    ldc1    $f5, 40($sp)
    ldc1    $f4, 32($sp)
    ldc1    $f3, 24($sp)
    ldc1    $f2, 16($sp)
    ldc1    $f1, 8($sp)
    ldc1    $f0, 0($sp)

    .cpreturn                                             # restore caller's gp from t8
    jalr    $zero, $ra
    daddiu  $sp, $sp, 320
    .cfi_adjust_cfa_offset -320

.Lret_forwarding_address\name:
    // Shift left by the forwarding address shift. This clears out the state bits since they are
    // in the top 2 bits of the lock word.
    sll     \reg, $t9, LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT
    jalr    $zero, $ra
    dext    \reg, \reg, 0, 32                             # Make sure the address is zero-extended.
END \name
.endm

// Note that art_quick_read_barrier_mark_regXX corresponds to register XX+1.
// ZERO (register 0) is reserved.
// AT (register 1) is reserved as a temporary/scratch register.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg01, $v0
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg02, $v1
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg03, $a0
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg04, $a1
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg05, $a2
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg06, $a3
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg07, $a4
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg08, $a5
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg09, $a6
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg10, $a7
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg11, $t0
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg12, $t1
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg13, $t2
// T3 (register 15) is reserved as a temporary/scratch register.
// S0 and S1 (registers 16 and 17) are reserved as suspended and thread registers.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg17, $s2
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg18, $s3
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg19, $s4
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg20, $s5
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg21, $s6
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg22, $s7
// T8 and T9 (registers 24 and 25) are reserved as temporary/scratch registers.
// K0, K1, GP, SP (registers 26 - 29) are reserved.
READ_BARRIER_MARK_REG art_quick_read_barrier_mark_reg29, $s8
// RA (register 31) is reserved.

// Caller code:
// Short constant offset/index:
//     ld      $t9, pReadBarrierMarkReg00
//     beqzc   $t9, skip_call
//     nop
//     jialc   $t9, thunk_disp
//   skip_call:
//     lwu     `out`, ofs(`obj`)
//    [dsubu   `out`, $zero, `out`
//     dext    `out`, `out`, 0, 32]  # Unpoison reference.

// Introspection entry point for a field/array-element load whose offset fits
// into int16_t. Checks the read barrier state bit in `obj`'s lock word: if the
// object is gray, falls through to the common reference-load-and-mark code;
// otherwise issues a load-load barrier and returns to the caller, which then
// performs the lwu itself. 8 instructions, so the entry points form a
// fixed-stride table.
.macro BRB_FIELD_SHORT_OFFSET_ENTRY obj
    # Explicit null check. May be redundant (for array elements or when the field
    # offset is larger than the page size, 4KB).
    # $ra will be adjusted to point to lwu's stack map when throwing NPE.
    beqzc   \obj, .Lintrospection_throw_npe
    lapc    $t3, .Lintrospection_exits        # $t3 = address of .Lintrospection_exits.
    .set push
    .set noat
    lw      $at, MIRROR_OBJECT_LOCK_WORD_OFFSET(\obj)
    sll     $at, $at, 31 - LOCK_WORD_READ_BARRIER_STATE_SHIFT  # Move barrier state bit
                                                               # to sign bit.
    bltz    $at, .Lintrospection_field_array  # If gray, load reference, mark.
    move    $t8, \obj                         # (Delay slot) Move `obj` to $t8 for common code.
    .set pop
    jalr    $zero, $ra                        # Otherwise, load-load barrier and return.
    sync                                      # (Delay slot) The load-load barrier.
.endm

// Caller code:
// Long constant offset/index:          | Variable index:
//     ld      $t9, pReadBarrierMarkReg00
//     beqz    $t9, skip_call           |     beqz    $t9, skip_call
//     daui    $t8, `obj`, ofs_hi       |     dlsa    $t8, `index`, `obj`, 2
//     jialc   $t9, thunk_disp          |     jialc   $t9, thunk_disp
//   skip_call:                         |   skip_call:
//     lwu     `out`, ofs_lo($t8)       |     lwu     `out`, ofs($t8)
//    [dsubu   `out`, $zero, `out`      |    [dsubu   `out`, $zero, `out`
//     dext    `out`, `out`, 0, 32]     |     dext    `out`, `out`, 0, 32]  # Unpoison reference.

// Introspection entry point for loads with a large constant offset or a
// variable index. The caller has already formed the partial address in $t8
// (see the caller code above), so no move to $t8 is needed here. Padded to the
// same 8-instruction stride as the short-offset entries.
.macro BRB_FIELD_LONG_OFFSET_ENTRY obj
    # No explicit null check for variable indices or large constant indices/offsets
    # as it must have been done earlier.
    lapc    $t3, .Lintrospection_exits        # $t3 = address of .Lintrospection_exits.
    .set push
    .set noat
    lw      $at, MIRROR_OBJECT_LOCK_WORD_OFFSET(\obj)
    sll     $at, $at, 31 - LOCK_WORD_READ_BARRIER_STATE_SHIFT  # Move barrier state bit
                                                               # to sign bit.
    bltzc   $at, .Lintrospection_field_array  # If gray, load reference, mark.
    .set pop
    sync                                      # Otherwise, load-load barrier and return.
    jic     $ra, 0
    break                                     # Padding to 8 instructions.
    break
.endm

// Introspection entry point for a GC root load. The caller has already loaded
// the root into `root`: return immediately if it is null, otherwise move it to
// $t8 and fall into the common mark-check code. 4 instructions per entry.
.macro BRB_GC_ROOT_ENTRY root
    lapc    $t3, .Lintrospection_exit_\root   # $t3 = exit point address.
    bnez    \root, .Lintrospection_common
    move    $t8, \root                        # (Delay slot) Move reference to $t8 for common code.
    jic     $ra, 0                            # Return if null.
.endm

// Exit point: move the (possibly updated) reference from $t8 into the caller's
// `out` register and return. Each exit slot is 2 instructions (8 bytes) so
// .Lintrospection_exits can be indexed by register number * 8.
.macro BRB_FIELD_EXIT out
.Lintrospection_exit_\out:
    jalr    $zero, $ra
    move    \out, $t8                         # (Delay slot) Return reference in expected register.
.endm

// Placeholder exit slot (8 bytes) for register numbers that can never be `out`.
.macro BRB_FIELD_EXIT_BREAK
    break
    break
.endm

ENTRY_NO_GP art_quick_read_barrier_mark_introspection
    # Entry points for offsets/indices not fitting into int16_t and for variable indices.
    BRB_FIELD_LONG_OFFSET_ENTRY $v0
    BRB_FIELD_LONG_OFFSET_ENTRY $v1
    BRB_FIELD_LONG_OFFSET_ENTRY $a0
    BRB_FIELD_LONG_OFFSET_ENTRY $a1
    BRB_FIELD_LONG_OFFSET_ENTRY $a2
    BRB_FIELD_LONG_OFFSET_ENTRY $a3
    BRB_FIELD_LONG_OFFSET_ENTRY $a4
    BRB_FIELD_LONG_OFFSET_ENTRY $a5
    BRB_FIELD_LONG_OFFSET_ENTRY $a6
    BRB_FIELD_LONG_OFFSET_ENTRY $a7
    BRB_FIELD_LONG_OFFSET_ENTRY $t0
    BRB_FIELD_LONG_OFFSET_ENTRY $t1
    BRB_FIELD_LONG_OFFSET_ENTRY $t2
    BRB_FIELD_LONG_OFFSET_ENTRY $s2
    BRB_FIELD_LONG_OFFSET_ENTRY $s3
    BRB_FIELD_LONG_OFFSET_ENTRY $s4
    BRB_FIELD_LONG_OFFSET_ENTRY $s5
    BRB_FIELD_LONG_OFFSET_ENTRY $s6
    BRB_FIELD_LONG_OFFSET_ENTRY $s7
    BRB_FIELD_LONG_OFFSET_ENTRY $s8

    # Entry points for offsets/indices fitting into int16_t.
    BRB_FIELD_SHORT_OFFSET_ENTRY $v0
    BRB_FIELD_SHORT_OFFSET_ENTRY $v1
    BRB_FIELD_SHORT_OFFSET_ENTRY $a0
    BRB_FIELD_SHORT_OFFSET_ENTRY $a1
    BRB_FIELD_SHORT_OFFSET_ENTRY $a2
    BRB_FIELD_SHORT_OFFSET_ENTRY $a3
    BRB_FIELD_SHORT_OFFSET_ENTRY $a4
    BRB_FIELD_SHORT_OFFSET_ENTRY $a5
    BRB_FIELD_SHORT_OFFSET_ENTRY $a6
    BRB_FIELD_SHORT_OFFSET_ENTRY $a7
    BRB_FIELD_SHORT_OFFSET_ENTRY $t0
    BRB_FIELD_SHORT_OFFSET_ENTRY $t1
    BRB_FIELD_SHORT_OFFSET_ENTRY $t2
    BRB_FIELD_SHORT_OFFSET_ENTRY $s2
    BRB_FIELD_SHORT_OFFSET_ENTRY $s3
    BRB_FIELD_SHORT_OFFSET_ENTRY $s4
    BRB_FIELD_SHORT_OFFSET_ENTRY $s5
    BRB_FIELD_SHORT_OFFSET_ENTRY $s6
    BRB_FIELD_SHORT_OFFSET_ENTRY $s7
    BRB_FIELD_SHORT_OFFSET_ENTRY $s8

    .global art_quick_read_barrier_mark_introspection_gc_roots
art_quick_read_barrier_mark_introspection_gc_roots:
    # Entry points for GC roots.
    BRB_GC_ROOT_ENTRY $v0
    BRB_GC_ROOT_ENTRY $v1
    BRB_GC_ROOT_ENTRY $a0
    BRB_GC_ROOT_ENTRY $a1
    BRB_GC_ROOT_ENTRY $a2
    BRB_GC_ROOT_ENTRY $a3
    BRB_GC_ROOT_ENTRY $a4
    BRB_GC_ROOT_ENTRY $a5
    BRB_GC_ROOT_ENTRY $a6
    BRB_GC_ROOT_ENTRY $a7
    BRB_GC_ROOT_ENTRY $t0
    BRB_GC_ROOT_ENTRY $t1
    BRB_GC_ROOT_ENTRY $t2
    BRB_GC_ROOT_ENTRY $s2
    BRB_GC_ROOT_ENTRY $s3
    BRB_GC_ROOT_ENTRY $s4
    BRB_GC_ROOT_ENTRY $s5
    BRB_GC_ROOT_ENTRY $s6
    BRB_GC_ROOT_ENTRY $s7
    BRB_GC_ROOT_ENTRY $s8
    .global art_quick_read_barrier_mark_introspection_end_of_entries
art_quick_read_barrier_mark_introspection_end_of_entries:

.Lintrospection_throw_npe:
    # Throw the NPE with $ra adjusted so the stack map lookup happens at the lwu.
    b       art_quick_throw_null_pointer_exception
    daddiu  $ra, $ra, 4                       # (Delay slot) Skip lwu, make $ra point to lwu's stack map.

    .set push
    .set noat

    // Fields and array elements.

.Lintrospection_field_array:
    // Get the field/element address using $t8 and the offset from the lwu instruction.
    lh      $at, 0($ra)                       # $ra points to lwu: $at = low 16 bits of field/element offset.
    daddiu  $ra, $ra, 4 + HEAP_POISON_INSTR_SIZE  # Skip lwu(+dsubu+dext).
    daddu   $t8, $t8, $at                     # $t8 = field/element address.

    // Calculate the address of the exit point, store it in $t3 and load the reference into $t8.
    // The byte at -HEAP_POISON_INSTR_SIZE-2 holds the rt (destination) field of the lwu.
    lb      $at, (-HEAP_POISON_INSTR_SIZE - 2)($ra)  # $ra-HEAP_POISON_INSTR_SIZE-4 points to
                                                     # "lwu `out`, ...".
    andi    $at, $at, 31                      # Extract `out` from lwu.

    lwu     $t8, 0($t8)                       # $t8 = reference.
    UNPOISON_HEAP_REF $t8

    // Return if null reference.
    bnez    $t8, .Lintrospection_common
    dlsa    $t3, $at, $t3, 3                  # (Delay slot) $t3 = address of the exit point
                                              # (BRB_FIELD_EXIT* macro is 8 bytes).

    // Early return through the exit point.
.Lintrospection_return_early:
    jic     $t3, 0                            # Move $t8 to `out` and return.

    // Code common for GC roots, fields and array elements.

.Lintrospection_common:
    // Check lock word for mark bit, if marked return.
    lw      $t9, MIRROR_OBJECT_LOCK_WORD_OFFSET($t8)
    sll     $at, $t9, 31 - LOCK_WORD_MARK_BIT_SHIFT  # Move mark bit to sign bit.
    bltzc   $at, .Lintrospection_return_early
#if (LOCK_WORD_STATE_SHIFT != 30) || (LOCK_WORD_STATE_FORWARDING_ADDRESS != 3)
    // The below code depends on the lock word state being in the highest bits
    // and the "forwarding address" state having all bits set.
#error "Unexpected lock word state shift or forwarding address state value."
#endif
    // Test that both the forwarding state bits are 1.
    sll     $at, $t9, 1
    and     $at, $at, $t9                     # Sign bit = 1 IFF both bits are 1.
    bgezc   $at, .Lintrospection_mark

    .set pop

    // Shift left by the forwarding address shift. This clears out the state bits since they are
    // in the top 2 bits of the lock word.
    sll     $t8, $t9, LOCK_WORD_STATE_FORWARDING_ADDRESS_SHIFT
    jalr    $zero, $t3                        # Move $t8 to `out` and return.
    dext    $t8, $t8, 0, 32                   # (Delay slot) Make sure the address is zero-extended.

.Lintrospection_mark:
    // Slow path: call the runtime to mark the reference. Save all caller-saved
    // registers since we interrupt arbitrary compiled code here.
    // Partially set up the stack frame preserving only $ra.
    daddiu  $sp, $sp, -320
    .cfi_adjust_cfa_offset 320
    sd      $ra, 312($sp)
    .cfi_rel_offset 31, 312

    // Set up $gp, clobbering $ra.
    lapc    $ra, 1f
1:
    .cpsetup $ra, 304, 1b                     # Save old $gp in 304($sp).

    // Finalize the stack frame and call.
    sd      $t3, 296($sp)                     # Preserve the exit point address.
    sd      $t2, 288($sp)
    .cfi_rel_offset 14, 288
    sd      $t1, 280($sp)
    .cfi_rel_offset 13, 280
    sd      $t0, 272($sp)
    .cfi_rel_offset 12, 272
    sd      $a7, 264($sp)
    .cfi_rel_offset 11, 264
    sd      $a6, 256($sp)
    .cfi_rel_offset 10, 256
    sd      $a5, 248($sp)
    .cfi_rel_offset 9, 248
    sd      $a4, 240($sp)
    .cfi_rel_offset 8, 240
    sd      $a3, 232($sp)
    .cfi_rel_offset 7, 232
    sd      $a2, 224($sp)
    .cfi_rel_offset 6, 224
    sd      $a1, 216($sp)
    .cfi_rel_offset 5, 216
    sd      $a0, 208($sp)
    .cfi_rel_offset 4, 208
    sd      $v1, 200($sp)
    .cfi_rel_offset 3, 200
    sd      $v0, 192($sp)
    .cfi_rel_offset 2, 192

    dla     $t9, artReadBarrierMark

    // Caller-saved FP registers.
    sdc1    $f23, 184($sp)
    sdc1    $f22, 176($sp)
    sdc1    $f21, 168($sp)
    sdc1    $f20, 160($sp)
    sdc1    $f19, 152($sp)
    sdc1    $f18, 144($sp)
    sdc1    $f17, 136($sp)
    sdc1    $f16, 128($sp)
    sdc1    $f15, 120($sp)
    sdc1    $f14, 112($sp)
    sdc1    $f13, 104($sp)
    sdc1    $f12, 96($sp)
    sdc1    $f11, 88($sp)
    sdc1    $f10, 80($sp)
    sdc1    $f9, 72($sp)
    sdc1    $f8, 64($sp)
    sdc1    $f7, 56($sp)
    sdc1    $f6, 48($sp)
    sdc1    $f5, 40($sp)
    sdc1    $f4, 32($sp)
    sdc1    $f3, 24($sp)
    sdc1    $f2, 16($sp)
    sdc1    $f1, 8($sp)
    sdc1    $f0, 0($sp)

    jalr    $t9                               # $v0 <- artReadBarrierMark(reference)
    move    $a0, $t8                          # (Delay slot) Pass reference in $a0.
    move    $t8, $v0                          # $t8 = marked reference.

    ld      $ra, 312($sp)
    .cfi_restore 31
    .cpreturn                                 # Restore old $gp from 304($sp).
    ld      $t3, 296($sp)                     # $t3 = address of the exit point.
    ld      $t2, 288($sp)
    .cfi_restore 14
    ld      $t1, 280($sp)
    .cfi_restore 13
    ld      $t0, 272($sp)
    .cfi_restore 12
    ld      $a7, 264($sp)
    .cfi_restore 11
    ld      $a6, 256($sp)
    .cfi_restore 10
    ld      $a5, 248($sp)
    .cfi_restore 9
    ld      $a4, 240($sp)
    .cfi_restore 8
    ld      $a3, 232($sp)
    .cfi_restore 7
    ld      $a2, 224($sp)
    .cfi_restore 6
    ld      $a1, 216($sp)
    .cfi_restore 5
    ld      $a0, 208($sp)
    .cfi_restore 4
    ld      $v1, 200($sp)
    .cfi_restore 3
    ld      $v0, 192($sp)
    .cfi_restore 2

    ldc1    $f23, 184($sp)
    ldc1    $f22, 176($sp)
    ldc1    $f21, 168($sp)
    ldc1    $f20, 160($sp)
    ldc1    $f19, 152($sp)
    ldc1    $f18, 144($sp)
    ldc1    $f17, 136($sp)
    ldc1    $f16, 128($sp)
    ldc1    $f15, 120($sp)
    ldc1    $f14, 112($sp)
    ldc1    $f13, 104($sp)
    ldc1    $f12, 96($sp)
    ldc1    $f11, 88($sp)
    ldc1    $f10, 80($sp)
    ldc1    $f9, 72($sp)
    ldc1    $f8, 64($sp)
    ldc1    $f7, 56($sp)
    ldc1    $f6, 48($sp)
    ldc1    $f5, 40($sp)
    ldc1    $f4, 32($sp)
    ldc1    $f3, 24($sp)
    ldc1    $f2, 16($sp)
    ldc1    $f1, 8($sp)
    ldc1    $f0, 0($sp)

    // Return through the exit point.
    jalr    $zero, $t3                        # Move $t8 to `out` and return.
    daddiu  $sp, $sp, 320                     # (Delay slot) Pop the frame.
    .cfi_adjust_cfa_offset -320

// Exit-point table, indexed by register number * 8. Break slots correspond to
// reserved registers (0, 1, 15-17, 24-29, 31) that can never be `out`.
.Lintrospection_exits:
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT $v0
    BRB_FIELD_EXIT $v1
    BRB_FIELD_EXIT $a0
    BRB_FIELD_EXIT $a1
    BRB_FIELD_EXIT $a2
    BRB_FIELD_EXIT $a3
    BRB_FIELD_EXIT $a4
    BRB_FIELD_EXIT $a5
    BRB_FIELD_EXIT $a6
    BRB_FIELD_EXIT $a7
    BRB_FIELD_EXIT $t0
    BRB_FIELD_EXIT $t1
    BRB_FIELD_EXIT $t2
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT $s2
    BRB_FIELD_EXIT $s3
    BRB_FIELD_EXIT $s4
    BRB_FIELD_EXIT $s5
    BRB_FIELD_EXIT $s6
    BRB_FIELD_EXIT $s7
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT_BREAK
    BRB_FIELD_EXIT $s8
    BRB_FIELD_EXIT_BREAK
END art_quick_read_barrier_mark_introspection

    /*
     * Polymorphic method invocation.
     * On entry:
     *   a0 = unused
     *   a1 = receiver
     */
.extern artInvokePolymorphic
ENTRY art_quick_invoke_polymorphic
    SETUP_SAVE_REFS_AND_ARGS_FRAME
    move    $a0, $a1               # Make $a0 the receiver
    move    $a1, rSELF             # Make $a1 an alias for the current Thread.
    jal     artInvokePolymorphic   # artInvokePolymorphic(receiver, Thread*, context)
    move    $a2, $sp               # (Delay slot) Make $a2 a pointer to the saved frame context.
    ld      $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_
    daddiu  $sp, $sp, REFS_AND_ARGS_MINUS_REFS_SIZE  # skip a0-a7 and f12-f19
    RESTORE_SAVE_REFS_ONLY_FRAME
    bne     $t0, $zero, 1f
    dmtc1   $v0, $f0               # place return value to FP return value
    jalr    $zero, $ra
    dmtc1   $v1, $f1               # (Delay slot) place return value to FP return value
1:
    DELIVER_PENDING_EXCEPTION
END art_quick_invoke_polymorphic

    /*
     * InvokeCustom invocation.
3077 * On entry: 3078 * a0 = call_site_idx 3079 */ 3080.extern artInvokeCustom 3081ENTRY art_quick_invoke_custom 3082 SETUP_SAVE_REFS_AND_ARGS_FRAME 3083 move $a1, rSELF # Make $a1 an alias for the current Thread. 3084 jal artInvokeCustom # Call artInvokeCustom(call_site_idx, Thread*, context). 3085 move $a2, $sp # Make $a1 a pointer to the saved frame context. 3086 ld $t0, THREAD_EXCEPTION_OFFSET(rSELF) # load Thread::Current()->exception_ 3087 daddiu $sp, $sp, REFS_AND_ARGS_MINUS_REFS_SIZE # skip a0-a7 and f12-f19 3088 RESTORE_SAVE_REFS_ONLY_FRAME 3089 bne $t0, $zero, 1f 3090 dmtc1 $v0, $f0 # place return value to FP return value 3091 jalr $zero, $ra 3092 dmtc1 $v1, $f1 # place return value to FP return value 30931: 3094 DELIVER_PENDING_EXCEPTION 3095END art_quick_invoke_polymorphic 3096 .set pop 3097