// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_X64)

#include "codegen.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "safepoint-table.h"

namespace v8 {
namespace internal {

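// Size of one entry in the lazy deoptimization table emitted by
// TableEntryGenerator::GeneratePrologue below; each entry is expected to be a
// push_imm32 plus a near jmp to the end of the table (5 bytes each on x64),
// which the ASSERT in GeneratePrologue checks against this constant.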
const int Deoptimizer::table_entry_size_ = 10;


int Deoptimizer::patch_size() {
  return Assembler::kCallInstructionLength;
}


void Deoptimizer::DeoptimizeFunction(JSFunction* function) {
  HandleScope scope;
  AssertNoAllocation no_allocation;

  if (!function->IsOptimized()) return;

  // Get the optimized code.
  Code* code = function->code();

  // Invalidate the relocation information, as it will be invalidated by the
  // code patching below and is not needed any more.
  code->InvalidateRelocation();

  // For each LLazyBailout instruction insert an absolute call to the
  // corresponding deoptimization entry, or a short call to an absolute
  // jump if space is short. The absolute jumps are put in a table just
  // before the safepoint table (space was allocated there when the Code
  // object was created, if necessary).

  Address instruction_start = function->code()->instruction_start();
#ifdef DEBUG
  Address prev_call_address = NULL;
#endif
  DeoptimizationInputData* deopt_data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  for (int i = 0; i < deopt_data->DeoptCount(); i++) {
    if (deopt_data->Pc(i)->value() == -1) continue;
    // Position where Call will be patched in.
    Address call_address = instruction_start + deopt_data->Pc(i)->value();
    // There is room enough to write a long call instruction because we pad
    // LLazyBailout instructions with nops if necessary.
    CodePatcher patcher(call_address, Assembler::kCallInstructionLength);
    patcher.masm()->Call(GetDeoptimizationEntry(i, LAZY), RelocInfo::NONE);
    ASSERT(prev_call_address == NULL ||
           call_address >= prev_call_address + patch_size());
    ASSERT(call_address + patch_size() <= code->instruction_end());
#ifdef DEBUG
    prev_call_address = call_address;
#endif
  }

  Isolate* isolate = code->GetIsolate();

  // Add the deoptimizing code to the list.
  DeoptimizingCodeListNode* node = new DeoptimizingCodeListNode(code);
  DeoptimizerData* data = isolate->deoptimizer_data();
  node->set_next(data->deoptimizing_code_list_);
  data->deoptimizing_code_list_ = node;

  // We might be in the middle of incremental marking with compaction.
  // Tell the collector to treat this code object in a special way and
  // ignore all slots that might have been recorded on it.
  isolate->heap()->mark_compact_collector()->InvalidateCode(code);

  // Set the code for the function to the non-optimized version.
  function->ReplaceCode(function->shared()->code());

  if (FLAG_trace_deopt) {
    PrintF("[forced deoptimization: ");
    function->PrintName();
    PrintF(" / %" V8PRIxPTR "]\n", reinterpret_cast<intptr_t>(function));
  }
}


void Deoptimizer::PatchStackCheckCodeAt(Code* unoptimized_code,
                                        Address pc_after,
                                        Code* check_code,
                                        Code* replacement_code) {
  // pc_after points to the instruction after the call; the call's 32-bit
  // target operand occupies the preceding kIntSize bytes.
  Address call_target_address = pc_after - kIntSize;
  ASSERT(check_code->entry() ==
         Assembler::target_address_at(call_target_address));
  // The stack check code matches the pattern:
  //
  //     cmp rsp, <limit>
  //     jae ok
  //     call <stack guard>
  //     test rax, <loop nesting depth>
  // ok: ...
  //
  // We will patch away the branch so the code is:
  //
  //     cmp rsp, <limit>  ;; Not changed
  //     nop
  //     nop
  //     call <on-stack replacement>
  //     test rax, <loop nesting depth>
  // ok:
  //
  ASSERT(*(call_target_address - 3) == 0x73 &&  // jae
         *(call_target_address - 2) == 0x07 &&  // offset
         *(call_target_address - 1) == 0xe8);   // call
  *(call_target_address - 3) = 0x66;  // 2 byte nop part 1
  *(call_target_address - 2) = 0x90;  // 2 byte nop part 2
  Assembler::set_target_address_at(call_target_address,
                                   replacement_code->entry());

  unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, replacement_code);
}


void Deoptimizer::RevertStackCheckCodeAt(Code* unoptimized_code,
                                         Address pc_after,
                                         Code* check_code,
                                         Code* replacement_code) {
  Address call_target_address = pc_after - kIntSize;
  ASSERT(replacement_code->entry() ==
         Assembler::target_address_at(call_target_address));
  // Replace the nops from patching (Deoptimizer::PatchStackCheckCode) to
  // restore the conditional branch.
  ASSERT(*(call_target_address - 3) == 0x66 &&  // 2 byte nop part 1
         *(call_target_address - 2) == 0x90 &&  // 2 byte nop part 2
         *(call_target_address - 1) == 0xe8);   // call
  *(call_target_address - 3) = 0x73;  // jae
  *(call_target_address - 2) = 0x07;  // offset
  Assembler::set_target_address_at(call_target_address,
                                   check_code->entry());

  check_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
      unoptimized_code, call_target_address, check_code);
}


static int LookupBailoutId(DeoptimizationInputData* data, unsigned ast_id) {
  ByteArray* translations = data->TranslationByteArray();
  int length = data->DeoptCount();
  for (int i = 0; i < length; i++) {
    if (static_cast<unsigned>(data->AstId(i)->value()) == ast_id) {
      TranslationIterator it(translations, data->TranslationIndex(i)->value());
      int value = it.Next();
      ASSERT(Translation::BEGIN == static_cast<Translation::Opcode>(value));
      // Read the number of frames.
      value = it.Next();
      if (value == 1) return i;
    }
  }
  UNREACHABLE();
  return -1;
}


void Deoptimizer::DoComputeOsrOutputFrame() {
  DeoptimizationInputData* data = DeoptimizationInputData::cast(
      optimized_code_->deoptimization_data());
  unsigned ast_id = data->OsrAstId()->value();
  // TODO(kasperl): This should not be the bailout_id_. It should be
  // the ast id. Confusing.
  ASSERT(bailout_id_ == ast_id);

  int bailout_id = LookupBailoutId(data, ast_id);
  unsigned translation_index = data->TranslationIndex(bailout_id)->value();
  ByteArray* translations = data->TranslationByteArray();

  TranslationIterator iterator(translations, translation_index);
  Translation::Opcode opcode =
      static_cast<Translation::Opcode>(iterator.Next());
  ASSERT(Translation::BEGIN == opcode);
  USE(opcode);
  int count = iterator.Next();
  iterator.Skip(1);  // Drop JS frame count.
  ASSERT(count == 1);
  USE(count);

  opcode = static_cast<Translation::Opcode>(iterator.Next());
  USE(opcode);
  ASSERT(Translation::JS_FRAME == opcode);
  unsigned node_id = iterator.Next();
  USE(node_id);
  ASSERT(node_id == ast_id);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator.Next()));
  USE(function);
  ASSERT(function == function_);
  unsigned height = iterator.Next();
  unsigned height_in_bytes = height * kPointerSize;
  USE(height_in_bytes);

  unsigned fixed_size = ComputeFixedSize(function_);
  unsigned input_frame_size = input_->GetFrameSize();
  ASSERT(fixed_size + height_in_bytes == input_frame_size);

  unsigned stack_slot_size = optimized_code_->stack_slots() * kPointerSize;
  unsigned outgoing_height = data->ArgumentsStackHeight(bailout_id)->value();
  unsigned outgoing_size = outgoing_height * kPointerSize;
  unsigned output_frame_size = fixed_size + stack_slot_size + outgoing_size;
  ASSERT(outgoing_size == 0);  // OSR does not happen in the middle of a call.

  if (FLAG_trace_osr) {
    PrintF("[on-stack replacement: begin 0x%08" V8PRIxPTR " ",
           reinterpret_cast<intptr_t>(function_));
    function_->PrintName();
    PrintF(" => node=%u, frame=%d->%d]\n",
           ast_id,
           input_frame_size,
           output_frame_size);
  }

  // There's only one output frame in the OSR case.
  output_count_ = 1;
  output_ = new FrameDescription*[1];
  output_[0] = new(output_frame_size) FrameDescription(
      output_frame_size, function_);
  output_[0]->SetFrameType(StackFrame::JAVA_SCRIPT);

  // Clear the incoming parameters in the optimized frame to avoid
  // confusing the garbage collector.
  unsigned output_offset = output_frame_size - kPointerSize;
  int parameter_count = function_->shared()->formal_parameter_count() + 1;
  for (int i = 0; i < parameter_count; ++i) {
    output_[0]->SetFrameSlot(output_offset, 0);
    output_offset -= kPointerSize;
  }

  // Translate the incoming parameters. This may overwrite some of the
  // incoming argument slots we've just cleared.
  int input_offset = input_frame_size - kPointerSize;
  bool ok = true;
  int limit = input_offset - (parameter_count * kPointerSize);
  while (ok && input_offset > limit) {
    ok = DoOsrTranslateCommand(&iterator, &input_offset);
  }

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function. Set them up explicitly.
  for (int i = StandardFrameConstants::kCallerPCOffset;
       ok && i >= StandardFrameConstants::kMarkerOffset;
       i -= kPointerSize) {
    intptr_t input_value = input_->GetFrameSlot(input_offset);
    if (FLAG_trace_osr) {
      const char* name = "UNKNOWN";
      switch (i) {
        case StandardFrameConstants::kCallerPCOffset:
          name = "caller's pc";
          break;
        case StandardFrameConstants::kCallerFPOffset:
          name = "fp";
          break;
        case StandardFrameConstants::kContextOffset:
          name = "context";
          break;
        case StandardFrameConstants::kMarkerOffset:
          name = "function";
          break;
      }
      PrintF(" [rsp + %d] <- 0x%08" V8PRIxPTR " ; [rsp + %d] "
             "(fixed part - %s)\n",
             output_offset,
             input_value,
             input_offset,
             name);
    }
    output_[0]->SetFrameSlot(output_offset, input_->GetFrameSlot(input_offset));
    input_offset -= kPointerSize;
    output_offset -= kPointerSize;
  }

  // Translate the rest of the frame.
  while (ok && input_offset >= 0) {
    ok = DoOsrTranslateCommand(&iterator, &input_offset);
  }

  // If translation of any command failed, continue using the input frame.
  if (!ok) {
    delete output_[0];
    output_[0] = input_;
    output_[0]->SetPc(reinterpret_cast<intptr_t>(from_));
  } else {
    // Set up the frame pointer and the context pointer.
    output_[0]->SetRegister(rbp.code(), input_->GetRegister(rbp.code()));
    output_[0]->SetRegister(rsi.code(), input_->GetRegister(rsi.code()));

    unsigned pc_offset = data->OsrPcOffset()->value();
    intptr_t pc = reinterpret_cast<intptr_t>(
        optimized_code_->entry() + pc_offset);
    output_[0]->SetPc(pc);
  }
  Code* continuation =
      function->GetIsolate()->builtins()->builtin(Builtins::kNotifyOSR);
  output_[0]->SetContinuation(
      reinterpret_cast<intptr_t>(continuation->entry()));

  if (FLAG_trace_osr) {
    PrintF("[on-stack replacement translation %s: 0x%08" V8PRIxPTR " ",
           ok ? "finished" : "aborted",
           reinterpret_cast<intptr_t>(function));
    function->PrintName();
    PrintF(" => pc=0x%08" V8PRIxPTR "]\n", output_[0]->GetPc());
  }
}


void Deoptimizer::DoComputeArgumentsAdaptorFrame(TranslationIterator* iterator,
                                                 int frame_index) {
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF(" translating arguments adaptor => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = ArgumentsAdaptorFrameConstants::kFrameSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::ARGUMENTS_ADAPTOR);

  // An arguments adaptor frame cannot be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // A marker value is used in place of the context.
  output_offset -= kPointerSize;
  intptr_t context = reinterpret_cast<intptr_t>(
      Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  output_frame->SetFrameSlot(output_offset, context);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context (adaptor sentinel)\n",
           top_address + output_offset, output_offset, context);
  }

  // The function was mentioned explicitly in the ARGUMENTS_ADAPTOR_FRAME.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  ASSERT(0 == output_offset);

  Builtins* builtins = isolate_->builtins();
  Code* adaptor_trampoline =
      builtins->builtin(Builtins::kArgumentsAdaptorTrampoline);
  intptr_t pc_value = reinterpret_cast<intptr_t>(
      adaptor_trampoline->instruction_start() +
      isolate_->heap()->arguments_adaptor_deopt_pc_offset()->value());
  output_frame->SetPc(pc_value);
}


void Deoptimizer::DoComputeConstructStubFrame(TranslationIterator* iterator,
                                              int frame_index) {
  Builtins* builtins = isolate_->builtins();
  Code* construct_stub = builtins->builtin(Builtins::kJSConstructStubGeneric);
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF(" translating construct stub => height=%d\n", height_in_bytes);
  }

  unsigned fixed_frame_size = 7 * kPointerSize;
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::CONSTRUCT);

  // A construct stub frame cannot be topmost or bottommost.
  ASSERT(frame_index > 0 && frame_index < output_count_ - 1);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address of the frame is computed from the previous
  // frame's top and this frame's size.
  intptr_t top_address;
  top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = height;
  unsigned output_offset = output_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }

  // Read caller's PC from the previous frame.
  output_offset -= kPointerSize;
  intptr_t callers_pc = output_[frame_index - 1]->GetPc();
  output_frame->SetFrameSlot(output_offset, callers_pc);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, callers_pc);
  }

  // Read caller's FP from the previous frame, and set this frame's FP.
  output_offset -= kPointerSize;
  intptr_t value = output_[frame_index - 1]->GetFp();
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  output_frame->SetFp(fp_value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // The context can be gotten from the previous frame.
  output_offset -= kPointerSize;
  value = output_[frame_index - 1]->GetContext();
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; context\n",
           top_address + output_offset, output_offset, value);
  }

  // A marker value is used in place of the function.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(StackFrame::CONSTRUCT));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; function (construct sentinel)\n",
           top_address + output_offset, output_offset, value);
  }

  // The output frame reflects a JSConstructStubGeneric frame.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(construct_stub);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; code object\n",
           top_address + output_offset, output_offset, value);
  }

  // Number of incoming arguments.
  output_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(Smi::FromInt(height - 1));
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; argc (%d)\n",
           top_address + output_offset, output_offset, value, height - 1);
  }

  // The newly allocated object was passed as receiver in the artificial
  // constructor stub environment created by HEnvironment::CopyForInlining().
  output_offset -= kPointerSize;
  value = output_frame->GetFrameSlot(output_frame_size - kPointerSize);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; allocated receiver\n",
           top_address + output_offset, output_offset, value);
  }

  ASSERT(0 == output_offset);

  intptr_t pc = reinterpret_cast<intptr_t>(
      construct_stub->instruction_start() +
      isolate_->heap()->construct_stub_deopt_pc_offset()->value());
  output_frame->SetPc(pc);
}


void Deoptimizer::DoComputeJSFrame(TranslationIterator* iterator,
                                   int frame_index) {
  int node_id = iterator->Next();
  JSFunction* function = JSFunction::cast(ComputeLiteral(iterator->Next()));
  unsigned height = iterator->Next();
  unsigned height_in_bytes = height * kPointerSize;
  if (FLAG_trace_deopt) {
    PrintF(" translating ");
    function->PrintName();
    PrintF(" => node=%d, height=%d\n", node_id, height_in_bytes);
  }

  // The 'fixed' part of the frame consists of the incoming parameters and
  // the part described by JavaScriptFrameConstants.
  unsigned fixed_frame_size = ComputeFixedSize(function);
  unsigned input_frame_size = input_->GetFrameSize();
  unsigned output_frame_size = height_in_bytes + fixed_frame_size;

  // Allocate and store the output frame description.
  FrameDescription* output_frame =
      new(output_frame_size) FrameDescription(output_frame_size, function);
  output_frame->SetFrameType(StackFrame::JAVA_SCRIPT);

  bool is_bottommost = (0 == frame_index);
  bool is_topmost = (output_count_ - 1 == frame_index);
  ASSERT(frame_index >= 0 && frame_index < output_count_);
  ASSERT(output_[frame_index] == NULL);
  output_[frame_index] = output_frame;

  // The top address for the bottommost output frame can be computed from
  // the input frame pointer and the output frame's height. For all
  // subsequent output frames, it can be computed from the previous one's
  // top address and the current frame's size.
  intptr_t top_address;
  if (is_bottommost) {
    // 2 = context and function in the frame.
    top_address =
        input_->GetRegister(rbp.code()) - (2 * kPointerSize) - height_in_bytes;
  } else {
    top_address = output_[frame_index - 1]->GetTop() - output_frame_size;
  }
  output_frame->SetTop(top_address);

  // Compute the incoming parameter translation.
  int parameter_count = function->shared()->formal_parameter_count() + 1;
  unsigned output_offset = output_frame_size;
  unsigned input_offset = input_frame_size;
  for (int i = 0; i < parameter_count; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  input_offset -= (parameter_count * kPointerSize);

  // There are no translation commands for the caller's pc and fp, the
  // context, and the function. Synthesize their values and set them up
  // explicitly.
  //
  // The caller's pc for the bottommost output frame is the same as in the
  // input frame. For all subsequent output frames, it can be read from the
  // previous one. This frame's pc can be computed from the non-optimized
  // function code and AST id of the bailout.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  intptr_t value;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetPc();
  }
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's pc\n",
           top_address + output_offset, output_offset, value);
  }

  // The caller's frame pointer for the bottommost output frame is the same
  // as in the input frame. For all subsequent output frames, it can be
  // read from the previous one. Also compute and set this frame's frame
  // pointer.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = output_[frame_index - 1]->GetFp();
  }
  output_frame->SetFrameSlot(output_offset, value);
  intptr_t fp_value = top_address + output_offset;
  ASSERT(!is_bottommost || input_->GetRegister(rbp.code()) == fp_value);
  output_frame->SetFp(fp_value);
  if (is_topmost) output_frame->SetRegister(rbp.code(), fp_value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR " ; caller's fp\n",
           fp_value, output_offset, value);
  }

  // For the bottommost output frame the context can be gotten from the input
  // frame. For all subsequent output frames it can be gotten from the function
  // so long as we don't inline functions that need local contexts.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  if (is_bottommost) {
    value = input_->GetFrameSlot(input_offset);
  } else {
    value = reinterpret_cast<intptr_t>(function->context());
  }
  output_frame->SetFrameSlot(output_offset, value);
  output_frame->SetContext(value);
  if (is_topmost) output_frame->SetRegister(rsi.code(), value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; context\n",
           top_address + output_offset, output_offset, value);
  }

  // The function was mentioned explicitly in the BEGIN_FRAME.
  output_offset -= kPointerSize;
  input_offset -= kPointerSize;
  value = reinterpret_cast<intptr_t>(function);
  // The function for the bottommost output frame should also agree with the
  // input frame.
  ASSERT(!is_bottommost || input_->GetFrameSlot(input_offset) == value);
  output_frame->SetFrameSlot(output_offset, value);
  if (FLAG_trace_deopt) {
    PrintF(" 0x%08" V8PRIxPTR ": [top + %d] <- 0x%08"
           V8PRIxPTR "; function\n",
           top_address + output_offset, output_offset, value);
  }

  // Translate the rest of the frame.
  for (unsigned i = 0; i < height; ++i) {
    output_offset -= kPointerSize;
    DoTranslateCommand(iterator, frame_index, output_offset);
  }
  ASSERT(0 == output_offset);

  // Compute this frame's PC, state, and continuation.
  Code* non_optimized_code = function->shared()->code();
  FixedArray* raw_data = non_optimized_code->deoptimization_data();
  DeoptimizationOutputData* data = DeoptimizationOutputData::cast(raw_data);
  Address start = non_optimized_code->instruction_start();
  unsigned pc_and_state = GetOutputInfo(data, node_id, function->shared());
  unsigned pc_offset = FullCodeGenerator::PcField::decode(pc_and_state);
  intptr_t pc_value = reinterpret_cast<intptr_t>(start + pc_offset);
  output_frame->SetPc(pc_value);

  FullCodeGenerator::State state =
      FullCodeGenerator::StateField::decode(pc_and_state);
  output_frame->SetState(Smi::FromInt(state));

  // Set the continuation for the topmost frame.
  if (is_topmost && bailout_type_ != DEBUGGER) {
    Code* continuation = (bailout_type_ == EAGER)
        ? isolate_->builtins()->builtin(Builtins::kNotifyDeoptimized)
        : isolate_->builtins()->builtin(Builtins::kNotifyLazyDeoptimized);
    output_frame->SetContinuation(
        reinterpret_cast<intptr_t>(continuation->entry()));
  }
}


void Deoptimizer::FillInputFrame(Address tos, JavaScriptFrame* frame) {
  // Set the register values. The values are not important as there are no
  // callee saved registers in JavaScript frames, so all registers are
  // spilled. Registers rbp and rsp are set to the correct values though.
  for (int i = 0; i < Register::kNumRegisters; i++) {
    input_->SetRegister(i, i * 4);
  }
  input_->SetRegister(rsp.code(), reinterpret_cast<intptr_t>(frame->sp()));
  input_->SetRegister(rbp.code(), reinterpret_cast<intptr_t>(frame->fp()));
  for (int i = 0; i < DoubleRegister::kNumAllocatableRegisters; i++) {
    input_->SetDoubleRegister(i, 0.0);
  }

  // Fill the frame content from the actual data on the frame.
  for (unsigned i = 0; i < input_->GetFrameSize(); i += kPointerSize) {
    input_->SetFrameSlot(i, Memory::uint64_at(tos + i));
  }
}


#define __ masm()->

void Deoptimizer::EntryGenerator::Generate() {
  GeneratePrologue();

  // Save all general purpose registers before messing with them.
  const int kNumberOfRegisters = Register::kNumRegisters;

  const int kDoubleRegsSize = kDoubleSize *
                              XMMRegister::kNumAllocatableRegisters;
  __ subq(rsp, Immediate(kDoubleRegsSize));

  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
    XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
    int offset = i * kDoubleSize;
    __ movsd(Operand(rsp, offset), xmm_reg);
  }

  // We push all registers onto the stack, even though we do not need
  // to restore all later.
  for (int i = 0; i < kNumberOfRegisters; i++) {
    Register r = Register::from_code(i);
    __ push(r);
  }

  const int kSavedRegistersAreaSize = kNumberOfRegisters * kPointerSize +
                                      kDoubleRegsSize;

  // When calling new_deoptimizer_function we need to pass the last argument
  // on the stack on Windows and in r8 on Linux. The remaining arguments are
  // all passed in registers (different ones on Linux and Windows though).

#ifdef _WIN64
  Register arg4 = r9;
  Register arg3 = r8;
  Register arg2 = rdx;
  Register arg1 = rcx;
#else
  Register arg4 = rcx;
  Register arg3 = rdx;
  Register arg2 = rsi;
  Register arg1 = rdi;
#endif

  // We use this to keep the value of the fifth argument temporarily.
  // Unfortunately we can't store it directly in r8 (used for passing
  // this on Linux), since it is another parameter passing register on Windows.
  Register arg5 = r11;

  // Get the bailout id from the stack.
  __ movq(arg3, Operand(rsp, kSavedRegistersAreaSize));

  // Get the address of the location in the code object if possible
  // and compute the fp-to-sp delta in register arg5.
  if (type() == EAGER) {
    __ Set(arg4, 0);
    __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
  } else {
    __ movq(arg4, Operand(rsp, kSavedRegistersAreaSize + 1 * kPointerSize));
    __ lea(arg5, Operand(rsp, kSavedRegistersAreaSize + 2 * kPointerSize));
  }

  __ subq(arg5, rbp);
  __ neg(arg5);

  // Allocate a new deoptimizer object.
  __ PrepareCallCFunction(6);
  __ movq(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ movq(arg1, rax);
  __ Set(arg2, type());
  // Args 3 and 4 are already in the right registers.

  // On Windows put the arguments on the stack (PrepareCallCFunction
  // has created space for this). On Linux pass the arguments in r8 and r9.
#ifdef _WIN64
  __ movq(Operand(rsp, 4 * kPointerSize), arg5);
  __ LoadAddress(arg5, ExternalReference::isolate_address());
  __ movq(Operand(rsp, 5 * kPointerSize), arg5);
#else
  __ movq(r8, arg5);
  __ LoadAddress(r9, ExternalReference::isolate_address());
#endif

  Isolate* isolate = masm()->isolate();

  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(ExternalReference::new_deoptimizer_function(isolate), 6);
  }
  // Preserve deoptimizer object in register rax and get the input
  // frame descriptor pointer.
  __ movq(rbx, Operand(rax, Deoptimizer::input_offset()));

  // Fill in the input registers.
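  // The general purpose registers were pushed above in ascending code order,
  // so the highest-numbered register is on top of the stack; popping in
  // descending order stores each value into its slot of the FrameDescription.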
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ pop(Operand(rbx, offset));
  }

  // Fill in the double input registers.
  int double_regs_offset = FrameDescription::double_registers_offset();
  for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; i++) {
    int dst_offset = i * kDoubleSize + double_regs_offset;
    __ pop(Operand(rbx, dst_offset));
  }

  // Remove the bailout id from the stack.
  if (type() == EAGER) {
    __ addq(rsp, Immediate(kPointerSize));
  } else {
    __ addq(rsp, Immediate(2 * kPointerSize));
  }

  // Compute a pointer to the unwinding limit in register rcx; that is
  // the first stack slot not part of the input frame.
  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
  __ addq(rcx, rsp);

  // Unwind the stack down to - but not including - the unwinding
  // limit and copy the contents of the activation frame to the input
  // frame description.
  __ lea(rdx, Operand(rbx, FrameDescription::frame_content_offset()));
  Label pop_loop;
  __ bind(&pop_loop);
  __ pop(Operand(rdx, 0));
  __ addq(rdx, Immediate(sizeof(intptr_t)));
  __ cmpq(rcx, rsp);
  __ j(not_equal, &pop_loop);

  // Compute the output frame in the deoptimizer.
  __ push(rax);
  __ PrepareCallCFunction(2);
  __ movq(arg1, rax);
  __ LoadAddress(arg2, ExternalReference::isolate_address());
  {
    AllowExternalCallThatCantCauseGC scope(masm());
    __ CallCFunction(
        ExternalReference::compute_output_frames_function(isolate), 2);
  }
  __ pop(rax);

  // Replace the current frame with the output frames.
  Label outer_push_loop, inner_push_loop;
  // Outer loop state: rax = current FrameDescription**, rdx = one past the
  // last FrameDescription**.
  __ movl(rdx, Operand(rax, Deoptimizer::output_count_offset()));
  __ movq(rax, Operand(rax, Deoptimizer::output_offset()));
  __ lea(rdx, Operand(rax, rdx, times_8, 0));
  __ bind(&outer_push_loop);
  // Inner loop state: rbx = current FrameDescription*, rcx = loop index.
  __ movq(rbx, Operand(rax, 0));
  __ movq(rcx, Operand(rbx, FrameDescription::frame_size_offset()));
  __ bind(&inner_push_loop);
  __ subq(rcx, Immediate(sizeof(intptr_t)));
  __ push(Operand(rbx, rcx, times_1, FrameDescription::frame_content_offset()));
  __ testq(rcx, rcx);
  __ j(not_zero, &inner_push_loop);
  __ addq(rax, Immediate(kPointerSize));
  __ cmpq(rax, rdx);
  __ j(below, &outer_push_loop);

  // In case of OSR, we have to restore the XMM registers.
  if (type() == OSR) {
    for (int i = 0; i < XMMRegister::kNumAllocatableRegisters; ++i) {
      XMMRegister xmm_reg = XMMRegister::FromAllocationIndex(i);
      int src_offset = i * kDoubleSize + double_regs_offset;
      __ movsd(xmm_reg, Operand(rbx, src_offset));
    }
  }

  // Push state, pc, and continuation from the last output frame.
  if (type() != OSR) {
    __ push(Operand(rbx, FrameDescription::state_offset()));
  }
  __ push(Operand(rbx, FrameDescription::pc_offset()));
  __ push(Operand(rbx, FrameDescription::continuation_offset()));

  // Push the registers from the last output frame.
  for (int i = 0; i < kNumberOfRegisters; i++) {
    int offset = (i * kPointerSize) + FrameDescription::registers_offset();
    __ push(Operand(rbx, offset));
  }

  // Restore the registers from the stack.
  for (int i = kNumberOfRegisters - 1; i >= 0; i--) {
    Register r = Register::from_code(i);
    // Do not restore rsp, simply pop the value into the next register
    // and overwrite this afterwards.
    if (r.is(rsp)) {
      ASSERT(i > 0);
      r = Register::from_code(i - 1);
    }
    __ pop(r);
  }

  // Set up the roots register.
  __ InitializeRootRegister();
  __ InitializeSmiConstantRegister();

  // Return to the continuation point.
  __ ret(0);
}


void Deoptimizer::TableEntryGenerator::GeneratePrologue() {
  // Create a sequence of deoptimization entries.
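  // Each entry pushes its index and jumps to the shared `done` label;
  // EntryGenerator::Generate later reads this pushed value off the stack as
  // the bailout id.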
  Label done;
  for (int i = 0; i < count(); i++) {
    int start = masm()->pc_offset();
    USE(start);
    __ push_imm32(i);
    __ jmp(&done);
    ASSERT(masm()->pc_offset() - start == table_entry_size_);
  }
  __ bind(&done);
}

#undef __


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64