// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/crankshaft/lithium.h"

#include "src/ast/scopes.h"
#include "src/codegen.h"
#include "src/objects-inl.h"

#if V8_TARGET_ARCH_IA32
#include "src/crankshaft/ia32/lithium-ia32.h"  // NOLINT
#include "src/crankshaft/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/crankshaft/x64/lithium-x64.h"  // NOLINT
#include "src/crankshaft/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/crankshaft/arm/lithium-arm.h"  // NOLINT
#include "src/crankshaft/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_PPC
#include "src/crankshaft/ppc/lithium-ppc.h"  // NOLINT
#include "src/crankshaft/ppc/lithium-codegen-ppc.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/crankshaft/mips/lithium-mips.h"  // NOLINT
#include "src/crankshaft/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/crankshaft/arm64/lithium-arm64.h"  // NOLINT
#include "src/crankshaft/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/crankshaft/mips64/lithium-mips64.h"  // NOLINT
#include "src/crankshaft/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/crankshaft/x87/lithium-x87.h"  // NOLINT
#include "src/crankshaft/x87/lithium-codegen-x87.h"  // NOLINT
#elif V8_TARGET_ARCH_S390
#include "src/crankshaft/s390/lithium-s390.h"  // NOLINT
#include "src/crankshaft/s390/lithium-codegen-s390.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

const auto GetRegConfig = RegisterConfiguration::Crankshaft;

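// Prints a human-readable description of this operand, e.g. "v5(R)" for an
// unallocated value that must get a register, or "[stack:3]" for a stack
// slot.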
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                GetRegConfig()->GetGeneralRegisterName(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                GetRegConfig()->GetDoubleRegisterName(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kNumRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    GetRegConfig()->GetGeneralRegisterName(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= DoubleRegister::kMaxNumRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", GetRegConfig()->GetDoubleRegisterName(reg_index));
      }
      break;
    }
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}


bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}


void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}


void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : base_frame_slots_(info->IsStub()
                            ? TypedFrameConstants::kFixedSlotCount
                            : StandardFrameConstants::kFixedSlotCount),
      current_frame_slots_(base_frame_slots_),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      deprecation_dependencies_(32, info->zone()),
      stability_dependencies_(8, info->zone()) {}

LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}


int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}


void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}


void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
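  // A control instruction terminates its block, so its gap is placed in front
  // of it; every other instruction is followed by its gap.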
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}

LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
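  // For example, with two parameters the receiver (index 0) maps to slot -3,
  // the first parameter (index 1) to -2, and the second (index 2) to -1.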
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
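// For index -1 (the receiver) with N declared parameters this yields
// (N + 2) * kPointerSize; the last parameter (index N - 1) is closest to ebp
// at 2 * kPointerSize.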
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}


void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}


void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (Handle<Map> map : deprecation_dependencies_) {
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (Handle<Map> map : stability_dependencies_) {
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->dependencies()->Commit(code);
}


LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}


Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0,
                           CodeObjectRequired::kYes);
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Handle<Code> code = CodeGenerator::MakeCodeEpilogue(
        &assembler, nullptr, info(), assembler.CodeObject());
    generator.FinishCode(code);
    CommitDependencies(code);
    Handle<ByteArray> source_positions =
        generator.source_position_table_builder()->ToSourcePositionTable(
            info()->isolate(), Handle<AbstractCode>::cast(code));
    code->set_source_position_table(*source_positions);
    code->set_is_crankshafted(true);

    CodeGenerator::PrintCode(code, info());
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}


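// Reserves frame slots for the double registers assigned by the allocator:
// when caller doubles are saved, each allocated double register needs one
// spill slot, or two when a double spans two pointer-sized slots (32-bit
// targets).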
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        current_frame_slots_ += 2;
      } else {
        current_frame_slots_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

void LChunkBuilderBase::CreateLazyBailoutForCall(HBasicBlock* current_block,
                                                 LInstruction* instr,
                                                 HInstruction* hydrogen_val) {
  if (!instr->IsCall()) return;

  HEnvironment* hydrogen_env = current_block->last_environment();
  HValue* hydrogen_value_for_lazy_bailout = hydrogen_val;
  DCHECK_NOT_NULL(hydrogen_env);
  if (instr->IsSyntacticTailCall()) {
    // If it was a syntactic tail call we need to drop the current frame and
    // all the frames on top of it that are either an arguments adaptor frame
    // or a tail caller frame.
    hydrogen_env = hydrogen_env->outer();
    while (hydrogen_env != nullptr &&
           (hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR ||
            hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION)) {
      hydrogen_env = hydrogen_env->outer();
    }
    if (hydrogen_env != nullptr) {
      if (hydrogen_env->frame_type() == JS_FUNCTION) {
        // If the outer frame is a function frame we have to replay the
        // environment manually because
        // 1) it does not yet contain the result of the inlined function, and
        // 2) we can't find the proper simulate that corresponds to the point
        //    after the inlined call to do a ReplayEnvironment() on.
        // So we push the return value on top of the outer environment.
        // For JS_GETTER/JS_SETTER/JS_CONSTRUCT nothing has to be done here:
        // the deoptimizer ensures that the result of the callee is correctly
        // propagated to the result register during deoptimization.
        hydrogen_env = hydrogen_env->Copy();
        hydrogen_env->Push(hydrogen_val);
      }
    } else {
      // Although we don't need this lazy bailout for normal execution
      // (because when we tail call from the outermost function we should pop
      // its frame), we still need it when the debugger is on.
      hydrogen_env = current_block->last_environment();
    }
  } else {
    if (hydrogen_val->HasObservableSideEffects()) {
      HSimulate* sim = HSimulate::cast(hydrogen_val->next());
      sim->ReplayEnvironment(hydrogen_env);
      hydrogen_value_for_lazy_bailout = sim;
    }
  }
  LInstruction* bailout = LChunkBuilderBase::AssignEnvironment(
      new (zone()) LLazyBailout(), hydrogen_env);
  bailout->set_hydrogen_value(hydrogen_value_for_lazy_bailout);
  chunk_->AddInstruction(bailout, current_block);
}

LInstruction* LChunkBuilderBase::AssignEnvironment(LInstruction* instr,
                                                   HEnvironment* hydrogen_env) {
  int argument_index_accumulator = 0;
  ZoneList<HValue*> objects_to_materialize(0, zone());
  DCHECK_NE(TAIL_CALLER_FUNCTION, hydrogen_env->frame_type());
  instr->set_environment(CreateEnvironment(
      hydrogen_env, &argument_index_accumulator, &objects_to_materialize));
  return instr;
}

LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         (hydrogen_env->frame_type() != JS_FUNCTION &&
          hydrogen_env->frame_type() != TAIL_CALLER_FUNCTION));

  if (hydrogen_env->frame_type() == TAIL_CALLER_FUNCTION) {
    // Skip potential outer arguments adaptor frame.
    HEnvironment* outer_hydrogen_env = hydrogen_env->outer();
    if (outer_hydrogen_env != nullptr &&
        outer_hydrogen_env->frame_type() == ARGUMENTS_ADAPTOR) {
      outer_hydrogen_env = outer_hydrogen_env->outer();
    }
    LEnvironment* outer = CreateEnvironment(
        outer_hydrogen_env, argument_index_accumulator, objects_to_materialize);
    return new (zone())
        LEnvironment(hydrogen_env->closure(), hydrogen_env->frame_type(),
                     ast_id, 0, 0, 0, outer, hydrogen_env->entry(), zone());
  }

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}


// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment.AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment.object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
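//
// For illustration, assuming a captured object A with fields {x, B}, where B
// is itself a captured object with fields {y, z}: object_mapping_ receives
// the lengths [2, 2], and values_ receives x, a hole for B, then y and z.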
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


}  // namespace internal
}  // namespace v8