// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_ARM

#include "src/api-arguments-inl.h"
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/counters.h"
#include "src/double.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/objects/api-callbacks.h"
#include "src/objects/regexp-match-info.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

#include "src/arm/code-stubs-arm.h"  // Cannot be the first include.

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void JSEntryStub::Generate(MacroAssembler* masm) {
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // [sp+0]: argv

  Label invoke, handler_entry, exit;

  {
    NoRootArrayScope no_root_array(masm);

    ProfileEntryHookStub::MaybeCallEntryHook(masm);

    // Called from C, so do not pop argc and args on exit (preserve sp)
    // No need to save register-passed args
    // Save callee-saved registers (incl. cp and fp), sp, and lr
    __ stm(db_w, sp, kCalleeSaved | lr.bit());

    // Save callee-saved vfp registers.
    __ vstm(db_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);
    // Set up the reserved register for 0.0.
    __ vmov(kDoubleRegZero, Double(0.0));

    __ InitializeRootRegister();
  }

  // Get address of argv, see stm above.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc

  // Set up argv in r4.
  int offset_to_argv = (kNumCalleeSaved + 1) * kPointerSize;
  offset_to_argv += kNumDoubleCalleeSaved * kDoubleSize;
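  // Layout note: the "+ 1" accounts for lr, which the stm above pushed
  // together with the kCalleeSaved core registers; the second term skips the
  // callee-saved VFP registers saved by the vstm.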
  __ ldr(r4, MemOperand(sp, offset_to_argv));

  // Push a frame with special values setup to mark it as an entry frame.
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  StackFrame::Type marker = type();
  __ mov(r7, Operand(StackFrame::TypeToMarker(marker)));
  __ mov(r6, Operand(StackFrame::TypeToMarker(marker)));
  __ mov(r5, Operand(ExternalReference::Create(
                 IsolateAddressId::kCEntryFPAddress, isolate())));
  __ ldr(r5, MemOperand(r5));
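  // r5 now holds the current C entry frame pointer; it is pushed below and
  // restored from the entry frame on the exit path.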
  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();

    // Push a bad frame pointer to fail if it is used.
    __ mov(scratch, Operand(-1));
    __ stm(db_w, sp, r5.bit() | r6.bit() | r7.bit() | scratch.bit());
  }

  Register scratch = r6;

  // Set up frame pointer for the frame to be pushed.
  __ add(fp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // If this is the outermost JS call, set js_entry_sp value.
  Label non_outermost_js;
  ExternalReference js_entry_sp =
      ExternalReference::Create(IsolateAddressId::kJSEntrySPAddress, isolate());
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
  __ ldr(scratch, MemOperand(r5));
  __ cmp(scratch, Operand::Zero());
  __ b(ne, &non_outermost_js);
  __ str(fp, MemOperand(r5));
  __ mov(scratch, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  Label cont;
  __ b(&cont);
  __ bind(&non_outermost_js);
  __ mov(scratch, Operand(StackFrame::INNER_JSENTRY_FRAME));
  __ bind(&cont);
  __ push(scratch);
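  // The value just pushed records whether this frame is the outermost JS
  // entry; the exit path below pops it and clears js_entry_sp only in the
  // outermost case.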

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);

  // Block literal pool emission whilst taking the position of the handler
  // entry. This avoids making the assumption that literal pools are always
  // emitted after an instruction is emitted, rather than before.
  {
    Assembler::BlockConstPoolScope block_const_pool(masm);
    __ bind(&handler_entry);
    handler_offset_ = handler_entry.pos();
    // Caught exception: Store result (exception) in the pending exception
    // field in the JSEnv and return a failure sentinel. Coming in here the
    // fp will be invalid because the PushStackHandler below sets it to 0 to
    // signal the existence of the JSEntry frame.
    __ mov(scratch,
           Operand(ExternalReference::Create(
               IsolateAddressId::kPendingExceptionAddress, isolate())));
  }
  __ str(r0, MemOperand(scratch));
  __ LoadRoot(r0, Heap::kExceptionRootIndex);
  __ b(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  // Must preserve r0-r4, r5-r6 are available.
  __ PushStackHandler();
  // If an exception not caught by another handler occurs, control is
  // transferred to handler_entry above, which records the pending exception
  // and branches to the exit sequence; that sequence restores all
  // kCalleeSaved registers (including cp and fp) to their saved values
  // before returning a failure to C.

  // Invoke the function by calling through JS entry trampoline builtin.
  // Notice that we cannot store a reference to the trampoline code directly in
  // this stub, because runtime stubs are not traversed when doing GC.

  // Expected registers by Builtins::JSEntryTrampoline
  // r0: code entry
  // r1: function
  // r2: receiver
  // r3: argc
  // r4: argv
  __ Call(EntryTrampoline(), RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);  // r0 holds result
  // Check if the current stack frame is marked as the outermost JS frame.
  Label non_outermost_js_2;
  __ pop(r5);
  __ cmp(r5, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ b(ne, &non_outermost_js_2);
  __ mov(r6, Operand::Zero());
  __ mov(r5, Operand(ExternalReference(js_entry_sp)));
  __ str(r6, MemOperand(r5));
  __ bind(&non_outermost_js_2);

  // Restore the top frame descriptors from the stack.
  __ pop(r3);
  __ mov(scratch, Operand(ExternalReference::Create(
                      IsolateAddressId::kCEntryFPAddress, isolate())));
  __ str(r3, MemOperand(scratch));

  // Reset the stack to the callee saved registers.
  __ add(sp, sp, Operand(-EntryFrameConstants::kCallerFPOffset));

  // Restore callee-saved registers and return.
#ifdef DEBUG
  if (FLAG_debug_code) {
    __ mov(lr, Operand(pc));
  }
#endif

  // Restore callee-saved vfp registers.
  __ vldm(ia_w, sp, kFirstCalleeSavedDoubleReg, kLastCalleeSavedDoubleReg);

  __ ldm(ia_w, sp, kCalleeSaved | pc.bit());
}

void DirectCEntryStub::Generate(MacroAssembler* masm) {
  // Place the return address on the stack, making the call
  // GC safe. The RegExp backend also relies on this.
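  // Storing lr in a stack slot lets the GC relocate the return address if
  // the calling code object moves during the C call; the ldr below then
  // returns through the (possibly updated) slot.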
  __ str(lr, MemOperand(sp, 0));  // Store the return address.
  __ blx(ip);                     // Call the C++ function.
  __ ldr(pc, MemOperand(sp, 0));  // Return via the saved return address.
}


void DirectCEntryStub::GenerateCall(MacroAssembler* masm,
                                    Register target) {
  if (FLAG_embedded_builtins) {
    if (masm->root_array_available() &&
        isolate()->ShouldLoadConstantsFromRootList()) {
      // This is basically an inlined version of Call(Handle<Code>) that loads
      // the code object into lr instead of ip.
      __ Move(ip, target);
      __ IndirectLoadConstant(lr, GetCode());
      __ add(lr, lr, Operand(Code::kHeaderSize - kHeapObjectTag));
      __ blx(lr);
      return;
    }
  }
  intptr_t code = reinterpret_cast<intptr_t>(GetCode().location());
  __ Move(ip, target);
  __ mov(lr, Operand(code, RelocInfo::CODE_TARGET));
  __ blx(lr);  // Call the stub.
}


void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
                                                     Zone* zone) {
  if (tasm->isolate()->function_entry_hook() != nullptr) {
    tasm->MaybeCheckConstPool();
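    // The predictable-size scope below fixes the length of the push/call/pop
    // sequence, so Generate() can recover the instrumented function's start
    // address from the return address with a constant offset.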
    PredictableCodeSizeScope predictable(
        tasm, TurboAssembler::kCallStubSize + 2 * kInstrSize);
    tasm->push(lr);
    tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
    tasm->pop(lr);
  }
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != nullptr) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->MaybeCheckConstPool();
    PredictableCodeSizeScope predictable(
        masm, TurboAssembler::kCallStubSize + 2 * kInstrSize);
    __ push(lr);
    __ CallStub(&stub);
    __ pop(lr);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // The entry hook is a "push lr" instruction, followed by a call.
  const int32_t kReturnAddressDistanceFromFunctionStart = 3 * kInstrSize;

  // This should contain all kCallerSaved registers.
  const RegList kSavedRegs =
      1 << 0 |  // r0
      1 << 1 |  // r1
      1 << 2 |  // r2
      1 << 3 |  // r3
      1 << 5 |  // r5
      1 << 9;   // r9
  // We also save lr, so the count here is one higher than the mask indicates.
  const int32_t kNumSavedRegs = 7;

  DCHECK_EQ(kCallerSaved & kSavedRegs, kCallerSaved);

  // Save all caller-save registers as this may be called from anywhere.
  __ stm(db_w, sp, kSavedRegs | lr.bit());

  // Compute the function's address for the first argument.
  __ sub(r0, lr, Operand(kReturnAddressDistanceFromFunctionStart));

  // The caller's return address is above the saved temporaries.
  // Grab that for the second argument to the hook.
  __ add(r1, sp, Operand(kNumSavedRegs * kPointerSize));

  // Align the stack if necessary.
  int frame_alignment = masm->ActivationFrameAlignment();
  if (frame_alignment > kPointerSize) {
    __ mov(r5, sp);
    DCHECK(base::bits::IsPowerOfTwo(frame_alignment));
    __ and_(sp, sp, Operand(-frame_alignment));
  }

  {
    UseScratchRegisterScope temps(masm);
    Register scratch = temps.Acquire();

#if V8_HOST_ARCH_ARM
    int32_t entry_hook =
        reinterpret_cast<int32_t>(isolate()->function_entry_hook());
    __ mov(scratch, Operand(entry_hook));
#else
    // Under the simulator we need to indirect the entry hook through a
    // trampoline function at a known address.
    // It additionally takes an isolate as a third parameter
    __ mov(r2, Operand(ExternalReference::isolate_address(isolate())));

    ApiFunction dispatcher(FUNCTION_ADDR(EntryHookTrampoline));
    __ mov(scratch, Operand(ExternalReference::Create(
                        &dispatcher, ExternalReference::BUILTIN_CALL)));
#endif
    __ Call(scratch);
  }

  // Restore the stack pointer if needed.
  if (frame_alignment > kPointerSize) {
    __ mov(sp, r5);
  }

  // Also pop pc to get Ret(0).
  __ ldm(ia_w, sp, kSavedRegs | pc.bit());
}

static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
  return ref0.address() - ref1.address();
}


// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Restores context. stack_space
// - space to be unwound on exit (includes the call JS arguments space and
// the additional space allocated for the fast call).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     int stack_space,
                                     MemOperand* stack_space_operand,
                                     MemOperand return_value_operand) {
  Isolate* isolate = masm->isolate();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = AddressOffset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = AddressOffset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
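  // kLimitOffset and kLevelOffset are byte offsets of the HandleScope limit
  // and level fields relative to next_address, so all three fields can be
  // addressed off the single base register (r9) loaded below.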

  DCHECK(function_address == r1 || function_address == r2);

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(r9, ExternalReference::is_profiling_address(isolate));
  __ ldrb(r9, MemOperand(r9, 0));
  __ cmp(r9, Operand(0));
  __ b(eq, &profiler_disabled);

  // Additional parameter is the address of the actual callback.
  __ Move(r3, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  __ Move(r3, function_address);
  __ bind(&end_profiler_check);
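  // r3 now holds the address that will actually be called: the profiling
  // thunk when the profiler is active, or the callback itself otherwise.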

  // Allocate HandleScope in callee-save registers.
  __ Move(r9, next_address);
  __ ldr(r4, MemOperand(r9, kNextOffset));
  __ ldr(r5, MemOperand(r9, kLimitOffset));
  __ ldr(r6, MemOperand(r9, kLevelOffset));
  __ add(r6, r6, Operand(1));
  __ str(r6, MemOperand(r9, kLevelOffset));
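  // r4 = saved next, r5 = saved limit, r6 = incremented level; keeping them
  // in callee-saved registers means they survive the C call below.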

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ Move(r0, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(), 1);
    __ PopSafepointRegisters();
  }

  // Native call returns to the DirectCEntry stub which redirects to the
  // return address pushed on stack (could have moved after GC).
  // DirectCEntry stub itself is generated early and never moves.
  DirectCEntryStub stub(isolate);
  stub.GenerateCall(masm, r3);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ Move(r0, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(), 1);
    __ PopSafepointRegisters();
  }

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;
  Label return_value_loaded;

  // load value from ReturnValue
  __ ldr(r0, return_value_operand);
  __ bind(&return_value_loaded);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ str(r4, MemOperand(r9, kNextOffset));
  if (__ emit_debug_code()) {
    __ ldr(r1, MemOperand(r9, kLevelOffset));
    __ cmp(r1, r6);
    __ Check(eq, AbortReason::kUnexpectedLevelAfterReturnFromApiCall);
  }
  __ sub(r6, r6, Operand(1));
  __ str(r6, MemOperand(r9, kLevelOffset));
  __ ldr(r6, MemOperand(r9, kLimitOffset));
  __ cmp(r5, r6);
  __ b(ne, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  // LeaveExitFrame expects unwind space to be in a register.
  if (stack_space_operand != nullptr) {
    __ ldr(r4, *stack_space_operand);
  } else {
    __ mov(r4, Operand(stack_space));
  }
  __ LeaveExitFrame(false, r4, stack_space_operand != nullptr);

  // Check if the function scheduled an exception.
  __ LoadRoot(r4, Heap::kTheHoleValueRootIndex);
  __ Move(r6, ExternalReference::scheduled_exception_address(isolate));
  __ ldr(r5, MemOperand(r6));
  __ cmp(r4, r5);
  __ b(ne, &promote_scheduled_exception);

  __ mov(pc, lr);  // Return to the caller.

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ str(r5, MemOperand(r9, kLimitOffset));
  __ mov(r4, r0);
  __ PrepareCallCFunction(1);
  __ Move(r0, ExternalReference::isolate_address(isolate));
  __ CallCFunction(ExternalReference::delete_handle_scope_extensions(), 1);
  __ mov(r0, r4);
  __ jmp(&leave_exit_frame);
}

void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- r4                  : call_data
  //  -- r2                  : holder
  //  -- r1                  : api_function_address
  //  -- cp                  : context
  //  --
  //  -- sp[0]               : last argument
  //  -- ...
  //  -- sp[(argc - 1) * 4]  : first argument
  //  -- sp[argc * 4]        : receiver
  // -----------------------------------

  Register call_data = r4;
  Register holder = r2;
  Register api_function_address = r1;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // call data
  __ push(call_data);

  Register scratch0 = call_data;
  Register scratch1 = r5;
  __ LoadRoot(scratch0, Heap::kUndefinedValueRootIndex);
  // return value
  __ push(scratch0);
  // return value default
  __ push(scratch0);
  // isolate
  __ Move(scratch1, ExternalReference::isolate_address(masm->isolate()));
  __ push(scratch1);
  // holder
  __ push(holder);

  // Prepare arguments.
  __ mov(scratch0, sp);

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;
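  // The three reserved slots hold the FunctionCallbackInfo fields written
  // below: implicit_args_, values_ and length_.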

  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  DCHECK(api_function_address != r0 && scratch0 != r0);
  // r0 = FunctionCallbackInfo&
  // The arguments are located after the return address.
  __ add(r0, sp, Operand(1 * kPointerSize));
  // FunctionCallbackInfo::implicit_args_
  __ str(scratch0, MemOperand(r0, 0 * kPointerSize));
  // FunctionCallbackInfo::values_
  __ add(scratch1, scratch0,
         Operand((FCA::kArgsLength - 1 + argc()) * kPointerSize));
  __ str(scratch1, MemOperand(r0, 1 * kPointerSize));
  // FunctionCallbackInfo::length_ = argc
  __ mov(scratch0, Operand(argc()));
  __ str(scratch0, MemOperand(r0, 2 * kPointerSize));

  ExternalReference thunk_ref = ExternalReference::invoke_function_callback();

  AllowExternalCallThatCantCauseGC scope(masm);
  // Stores return the first js argument
  int return_value_offset = 2 + FCA::kReturnValueOffset;
  MemOperand return_value_operand(fp, return_value_offset * kPointerSize);
  const int stack_space = argc() + FCA::kArgsLength + 1;
  MemOperand* stack_space_operand = nullptr;

  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, stack_space,
                           stack_space_operand, return_value_operand);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = r4;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  Register api_function_address = r2;

  __ push(receiver);
  // Push data from AccessorInfo.
  __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kDataOffset));
  __ push(scratch);
  __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  __ Push(scratch, scratch);
  __ Move(scratch, ExternalReference::isolate_address(isolate()));
  __ Push(scratch, holder);
  __ Push(Smi::kZero);  // should_throw_on_error -> false
  __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);
  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
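  // kStackUnwindSpace covers the seven PropertyCallbackArguments slots pushed
  // above plus the name handle.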

  // Load address of v8::PropertyAccessorInfo::args_ array and name handle.
  __ mov(r0, sp);                             // r0 = Handle<Name>
  __ add(r1, r0, Operand(1 * kPointerSize));  // r1 = v8::PCI::args_

  const int kApiStackSpace = 1;
  FrameScope frame_scope(masm, StackFrame::MANUAL);
  __ EnterExitFrame(false, kApiStackSpace);

  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  __ str(r1, MemOperand(sp, 1 * kPointerSize));
  __ add(r1, sp, Operand(1 * kPointerSize));  // r1 = v8::PropertyCallbackInfo&

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback();

  __ ldr(scratch, FieldMemOperand(callback, AccessorInfo::kJsGetterOffset));
  __ ldr(api_function_address,
         FieldMemOperand(scratch, Foreign::kForeignAddressOffset));

  // +3 is to skip prolog, return address and name handle.
  MemOperand return_value_operand(
      fp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           kStackUnwindSpace, nullptr, return_value_operand);
}

#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_ARM