// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/lithium.h"

#include "src/v8.h"

#include "src/scopes.h"
#include "src/serialize.h"

#if V8_TARGET_ARCH_IA32
#include "src/ia32/lithium-ia32.h"  // NOLINT
#include "src/ia32/lithium-codegen-ia32.h"  // NOLINT
#elif V8_TARGET_ARCH_X64
#include "src/x64/lithium-x64.h"  // NOLINT
#include "src/x64/lithium-codegen-x64.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM
#include "src/arm/lithium-arm.h"  // NOLINT
#include "src/arm/lithium-codegen-arm.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS
#include "src/mips/lithium-mips.h"  // NOLINT
#include "src/mips/lithium-codegen-mips.h"  // NOLINT
#elif V8_TARGET_ARCH_ARM64
#include "src/arm64/lithium-arm64.h"  // NOLINT
#include "src/arm64/lithium-codegen-arm64.h"  // NOLINT
#elif V8_TARGET_ARCH_MIPS64
#include "src/mips64/lithium-mips64.h"  // NOLINT
#include "src/mips64/lithium-codegen-mips64.h"  // NOLINT
#elif V8_TARGET_ARCH_X87
#include "src/x87/lithium-x87.h"  // NOLINT
#include "src/x87/lithium-codegen-x87.h"  // NOLINT
#else
#error "Unknown architecture."
#endif

namespace v8 {
namespace internal {

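// Writes a human-readable description of this operand (its kind, allocation
// policy and register or slot assignment) to the given stream.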
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= Register::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 ||
          reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    DoubleRegister::AllocationIndexToString(reg_index));
      }
      break;
    }
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;

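// Lazily creates the per-kind cache of operands with indices
// [0, kNumCachedOperands), so these operands can be shared.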
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}


void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}


void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}

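// A parallel move is redundant if every individual move in it is redundant.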
bool LParallelMove::IsRedundant() const {
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsRedundant()) return false;
  }
  return true;
}

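// Prints the non-eliminated moves as "destination = source;" pairs; a move
// whose source equals its destination is printed as the operand alone.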
void LParallelMove::PrintDataTo(StringStream* stream) const {
  bool first = true;
  for (int i = 0; i < move_operands_.length(); ++i) {
    if (!move_operands_[i].IsEliminated()) {
      LOperand* source = move_operands_[i].source();
      LOperand* destination = move_operands_[i].destination();
      if (!first) stream->Add(" ");
      first = false;
      if (source->Equals(destination)) {
        destination->PrintTo(stream);
      } else {
        destination->PrintTo(stream);
        stream->Add(" = ");
        source->PrintTo(stream);
      }
      stream->Add(";");
    }
  }
}


void LEnvironment::PrintTo(StringStream* stream) {
  stream->Add("[id=%d|", ast_id().ToInt());
  if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
    stream->Add("deopt_id=%d|", deoptimization_index());
  }
  stream->Add("parameters=%d|", parameter_count());
  stream->Add("arguments_stack_height=%d|", arguments_stack_height());
  for (int i = 0; i < values_.length(); ++i) {
    if (i != 0) stream->Add(";");
    if (values_[i] == NULL) {
      stream->Add("[hole]");
    } else {
      values_[i]->PrintTo(stream);
    }
  }
  stream->Add("]");
}

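// Records an operand that holds a tagged value in this pointer map.
// Arguments (stack slots with negative indices) are never recorded, and
// double operands are not allowed.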
void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  pointer_operands_.Add(op, zone);
}


void LPointerMap::RemovePointer(LOperand* op) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (pointer_operands_[i]->Equals(op)) {
      pointer_operands_.Remove(i);
      --i;
    }
  }
}


void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
  // Do not record arguments as pointers.
  if (op->IsStackSlot() && op->index() < 0) return;
  DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
  untagged_operands_.Add(op, zone);
}


void LPointerMap::PrintTo(StringStream* stream) {
  stream->Add("{");
  for (int i = 0; i < pointer_operands_.length(); ++i) {
    if (i != 0) stream->Add(";");
    pointer_operands_[i]->PrintTo(stream);
  }
  stream->Add("}");
}

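// Returns the byte offset, relative to the frame pointer, of the stack slot
// with the given index. Non-negative indices are locals and spill slots below
// the fixed part of the frame; negative indices are incoming parameters.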
int StackSlotOffset(int index) {
  if (index >= 0) {
    // Local or spill slot. Skip the frame pointer, function, and
    // context in the fixed part of the frame.
    return -(index + 1) * kPointerSize -
        StandardFrameConstants::kFixedFrameSizeFromFp;
  } else {
    // Incoming parameter. Skip the return address.
    return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
  }
}


LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_closures_(1, info->zone()),
      deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
      stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}


LLabel* LChunk::GetLabel(int block_id) const {
  HBasicBlock* block = graph_->blocks()->at(block_id);
  int first_instruction = block->first_instruction_index();
  return LLabel::cast(instructions_[first_instruction]);
}

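// Follows the chain of label replacements set up by MarkEmptyBlocks and
// returns the id of the block that will actually be emitted.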
int LChunk::LookupDestination(int block_id) const {
  LLabel* cur = GetLabel(block_id);
  while (cur->replacement() != NULL) {
    cur = cur->replacement();
  }
  return cur->block_id();
}

Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}

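// Marks blocks that consist only of a redundant label, redundant gaps and a
// goto: the block's label is replaced by the label of the goto's target, so
// such blocks can be skipped during code generation.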
void LChunk::MarkEmptyBlocks() {
  LPhase phase("L_Mark empty blocks", this);
  for (int i = 0; i < graph()->blocks()->length(); ++i) {
    HBasicBlock* block = graph()->blocks()->at(i);
    int first = block->first_instruction_index();
    int last = block->last_instruction_index();
    LInstruction* first_instr = instructions()->at(first);
    LInstruction* last_instr = instructions()->at(last);

    LLabel* label = LLabel::cast(first_instr);
    if (last_instr->IsGoto()) {
      LGoto* goto_instr = LGoto::cast(last_instr);
      if (label->IsRedundant() &&
          !label->is_loop_header()) {
        bool can_eliminate = true;
        for (int i = first + 1; i < last && can_eliminate; ++i) {
          LInstruction* cur = instructions()->at(i);
          if (cur->IsGap()) {
            LGap* gap = LGap::cast(cur);
            if (!gap->IsRedundant()) {
              can_eliminate = false;
            }
          } else {
            can_eliminate = false;
          }
        }
        if (can_eliminate) {
          label->set_replacement(GetLabel(goto_instr->block_id()));
        }
      }
    }
  }
}

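// Appends an instruction to the chunk together with an LInstructionGap.
// For control instructions the gap is placed before the instruction;
// otherwise it follows the instruction. The instruction's pointer map, if
// any, is registered with the instruction's position in the chunk.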
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}


LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}


int LChunk::GetParameterStackSlot(int index) const {
  // The receiver is at index 0, the first parameter at index 1, so we
  // shift all parameter indexes down by the number of parameters, and
  // make sure they end up negative so they are distinguishable from
  // spill slots.
  int result = index - info()->num_parameters() - 1;

  DCHECK(result < 0);
  return result;
}


// A parameter relative to ebp in the arguments stub.
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}


LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}


bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}


int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}

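// Adds a move from 'from' to 'to' to the parallel move at the start position
// of the gap at the given instruction index.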
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}


HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}


Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}

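// Registers the generated code with the maps it depends on: deprecation
// dependencies go into the transition group and stability dependencies into
// the prototype-check group. The dependencies recorded on the
// CompilationInfo are committed as well.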
void LChunk::CommitDependencies(Handle<Code> code) const {
  for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
       iend = deprecation_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (MapSet::const_iterator it = stability_dependencies_.begin(),
       iend = stability_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->CommitDependencies(code);
}

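// Builds the Lithium chunk for the given Hydrogen graph and runs the register
// allocator over it. Returns NULL and aborts the optimization if the graph
// needs too many virtual registers or if register allocation fails.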
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}

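// Generates optimized code for this chunk. On success the code object is
// finalized, dependencies are committed and the code is marked as
// crankshafted; on failure a null handle is returned.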
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // TODO(yangguo) remove this once the code serializer handles code stubs.
  if (info()->will_serialize()) assembler.enable_serializer();
  LCodeGen generator(this, &assembler, info());

  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}

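// Remembers which double registers the allocator assigned and, when the code
// has to save caller doubles, reserves one spill slot per allocated double
// register (two slots if a double is twice the pointer size).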
void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
  allocated_double_registers_ = allocated_registers;
  BitVector* doubles = allocated_double_registers();
  BitVector::Iterator iterator(doubles);
  while (!iterator.Done()) {
    if (info()->saves_caller_doubles()) {
      if (kDoubleSize == kPointerSize * 2) {
        spill_slot_count_ += 2;
      } else {
        spill_slot_count_++;
      }
    }
    iterator.Advance();
  }
}


void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}


void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}

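// Recursively translates a Hydrogen environment (and its outer environments)
// into an LEnvironment used for deoptimization. Ordinary values are added via
// UseAny; arguments objects and captured objects get a materialization marker
// and are recorded separately in a second pass.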
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}

// Add an object to the supplied environment and object materialization list.
//
// Notes:
//
// We are building three lists here:
//
// 1. In the result->object_mapping_ list (added to by the
//    LEnvironment::Add*Object methods), we store the lengths (number
//    of fields) of the captured objects in depth-first traversal order, or
//    in case of duplicated objects, we store the index to the duplicate object
//    (with a tag to differentiate between captured and duplicated objects).
//
// 2. The object fields are stored in the result->values_ list
//    (added to by the LEnvironment::AddValue method) sequentially as lists
//    of fields with holes for nested objects (the holes will be expanded
//    later by LCodegen::AddToTranslation according to the
//    LEnvironment::object_mapping_ list).
//
// 3. The auxiliary objects_to_materialize array stores the hydrogen values
//    in the same order as the result->object_mapping_ list. This is used
//    to detect duplicate values and calculate the corresponding object index.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}


LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}


} }  // namespace v8::internal