c15e9dbee2612f8e670a036a42357e76302c725e
[platform/upstream/nodejs.git] / deps / v8 / src / lithium.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/lithium.h"
6
7 #include "src/v8.h"
8
9 #include "src/scopes.h"
10 #include "src/serialize.h"
11
12 #if V8_TARGET_ARCH_IA32
13 #include "src/ia32/lithium-ia32.h"  // NOLINT
14 #include "src/ia32/lithium-codegen-ia32.h"  // NOLINT
15 #elif V8_TARGET_ARCH_X64
16 #include "src/x64/lithium-x64.h"  // NOLINT
17 #include "src/x64/lithium-codegen-x64.h"  // NOLINT
18 #elif V8_TARGET_ARCH_ARM
19 #include "src/arm/lithium-arm.h"  // NOLINT
20 #include "src/arm/lithium-codegen-arm.h"  // NOLINT
21 #elif V8_TARGET_ARCH_PPC
22 #include "src/ppc/lithium-ppc.h"          // NOLINT
23 #include "src/ppc/lithium-codegen-ppc.h"  // NOLINT
24 #elif V8_TARGET_ARCH_MIPS
25 #include "src/mips/lithium-mips.h"  // NOLINT
26 #include "src/mips/lithium-codegen-mips.h"  // NOLINT
27 #elif V8_TARGET_ARCH_ARM64
28 #include "src/arm64/lithium-arm64.h"  // NOLINT
29 #include "src/arm64/lithium-codegen-arm64.h"  // NOLINT
30 #elif V8_TARGET_ARCH_MIPS64
31 #include "src/mips64/lithium-mips64.h"  // NOLINT
32 #include "src/mips64/lithium-codegen-mips64.h"  // NOLINT
33 #elif V8_TARGET_ARCH_X87
34 #include "src/x87/lithium-x87.h"  // NOLINT
35 #include "src/x87/lithium-codegen-x87.h"  // NOLINT
36 #else
37 #error "Unknown architecture."
38 #endif
39
40 namespace v8 {
41 namespace internal {
42
43
// Writes a human-readable description of this operand to |stream|.
// The output encodes the operand kind and, where applicable, the
// allocation policy or assigned register/slot, e.g. "v3(=eax)",
// "[stack:5]" or "[d1|R]".
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      // A fixed stack slot overrides any extended policy, so print it
      // and stop here.
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          // Range-check so that printing a corrupt index never faults.
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= Register::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      // For allocated registers index() is the allocation index.
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 ||
          reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    DoubleRegister::AllocationIndexToString(reg_index));
      }
      break;
    }
  }
}
132
133
// Per-instantiation static cache of the first kNumCachedOperands
// operands of this kind; lazily built by SetUpCache() and released by
// TearDownCache().
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
137
138
// Allocates and initializes the static operand cache for this sub-kind.
// Idempotent: a second call returns immediately.
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    // Slot i represents the operand of this kind with index i.
    cache[i].ConvertTo(kOperandKind, i);
  }
}
147
148
// Frees the static operand cache and resets it so SetUpCache() can
// rebuild.  Safe when no cache was built (delete[] NULL is a no-op).
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}
154
155
// Builds the operand caches for every operand kind listed in
// LITHIUM_OPERAND_LIST (expands to L<Kind>::SetUpCache() per entry).
void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}
161
162
// Tears down the operand caches for every operand kind listed in
// LITHIUM_OPERAND_LIST; the counterpart of SetUpCaches().
void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}
168
169
170 bool LParallelMove::IsRedundant() const {
171   for (int i = 0; i < move_operands_.length(); ++i) {
172     if (!move_operands_[i].IsRedundant()) return false;
173   }
174   return true;
175 }
176
177
178 void LParallelMove::PrintDataTo(StringStream* stream) const {
179   bool first = true;
180   for (int i = 0; i < move_operands_.length(); ++i) {
181     if (!move_operands_[i].IsEliminated()) {
182       LOperand* source = move_operands_[i].source();
183       LOperand* destination = move_operands_[i].destination();
184       if (!first) stream->Add(" ");
185       first = false;
186       if (source->Equals(destination)) {
187         destination->PrintTo(stream);
188       } else {
189         destination->PrintTo(stream);
190         stream->Add(" = ");
191         source->PrintTo(stream);
192       }
193       stream->Add(";");
194     }
195   }
196 }
197
198
199 void LEnvironment::PrintTo(StringStream* stream) {
200   stream->Add("[id=%d|", ast_id().ToInt());
201   if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
202     stream->Add("deopt_id=%d|", deoptimization_index());
203   }
204   stream->Add("parameters=%d|", parameter_count());
205   stream->Add("arguments_stack_height=%d|", arguments_stack_height());
206   for (int i = 0; i < values_.length(); ++i) {
207     if (i != 0) stream->Add(";");
208     if (values_[i] == NULL) {
209       stream->Add("[hole]");
210     } else {
211       values_[i]->PrintTo(stream);
212     }
213   }
214   stream->Add("]");
215 }
216
217
218 void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
219   // Do not record arguments as pointers.
220   if (op->IsStackSlot() && op->index() < 0) return;
221   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
222   pointer_operands_.Add(op, zone);
223 }
224
225
226 void LPointerMap::RemovePointer(LOperand* op) {
227   // Do not record arguments as pointers.
228   if (op->IsStackSlot() && op->index() < 0) return;
229   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
230   for (int i = 0; i < pointer_operands_.length(); ++i) {
231     if (pointer_operands_[i]->Equals(op)) {
232       pointer_operands_.Remove(i);
233       --i;
234     }
235   }
236 }
237
238
239 void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
240   // Do not record arguments as pointers.
241   if (op->IsStackSlot() && op->index() < 0) return;
242   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
243   untagged_operands_.Add(op, zone);
244 }
245
246
247 void LPointerMap::PrintTo(StringStream* stream) {
248   stream->Add("{");
249   for (int i = 0; i < pointer_operands_.length(); ++i) {
250     if (i != 0) stream->Add(";");
251     pointer_operands_[i]->PrintTo(stream);
252   }
253   stream->Add("}");
254 }
255
256
257 int StackSlotOffset(int index) {
258   if (index >= 0) {
259     // Local or spill slot. Skip the frame pointer, function, and
260     // context in the fixed part of the frame.
261     return -(index + 1) * kPointerSize -
262         StandardFrameConstants::kFixedFrameSizeFromFp;
263   } else {
264     // Incoming parameter. Skip the return address.
265     return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
266   }
267 }
268
269
// Constructs an empty chunk for |graph|.  All containers are allocated
// in the compilation zone; the initial capacities are heuristics.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_closures_(1, info->zone()),
      deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
      stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}
279
280
281 LLabel* LChunk::GetLabel(int block_id) const {
282   HBasicBlock* block = graph_->blocks()->at(block_id);
283   int first_instruction = block->first_instruction_index();
284   return LLabel::cast(instructions_[first_instruction]);
285 }
286
287
288 int LChunk::LookupDestination(int block_id) const {
289   LLabel* cur = GetLabel(block_id);
290   while (cur->replacement() != NULL) {
291     cur = cur->replacement();
292   }
293   return cur->block_id();
294 }
295
// Returns the assembler label for block |block_id|.  The block must not
// have been replaced (callers resolve via LookupDestination first).
Label* LChunk::GetAssemblyLabel(int block_id) const {
  LLabel* label = GetLabel(block_id);
  DCHECK(!label->HasReplacement());
  return label->label();
}
301
302
303 void LChunk::MarkEmptyBlocks() {
304   LPhase phase("L_Mark empty blocks", this);
305   for (int i = 0; i < graph()->blocks()->length(); ++i) {
306     HBasicBlock* block = graph()->blocks()->at(i);
307     int first = block->first_instruction_index();
308     int last = block->last_instruction_index();
309     LInstruction* first_instr = instructions()->at(first);
310     LInstruction* last_instr = instructions()->at(last);
311
312     LLabel* label = LLabel::cast(first_instr);
313     if (last_instr->IsGoto()) {
314       LGoto* goto_instr = LGoto::cast(last_instr);
315       if (label->IsRedundant() &&
316           !label->is_loop_header()) {
317         bool can_eliminate = true;
318         for (int i = first + 1; i < last && can_eliminate; ++i) {
319           LInstruction* cur = instructions()->at(i);
320           if (cur->IsGap()) {
321             LGap* gap = LGap::cast(cur);
322             if (!gap->IsRedundant()) {
323               can_eliminate = false;
324             }
325           } else {
326             can_eliminate = false;
327           }
328         }
329         if (can_eliminate) {
330           label->set_replacement(GetLabel(goto_instr->block_id()));
331         }
332       }
333     }
334   }
335 }
336
337
// Appends |instr| to the instruction stream, paired with a fresh
// LInstructionGap for the register allocator.  Control instructions get
// their gap *before* them; all others get it *after*.  |index| records
// the position of |instr| itself for its pointer map.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    // The instruction is appended next, so it lands at length().
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
356
357
// Creates a constant operand whose index is the hydrogen constant's id,
// so LookupConstant() can map it back to the HConstant.
LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
  return LConstantOperand::Create(constant->id(), zone());
}
361
362
363 int LChunk::GetParameterStackSlot(int index) const {
364   // The receiver is at index 0, the first parameter at index 1, so we
365   // shift all parameter indexes down by the number of parameters, and
366   // make sure they end up negative so they are distinguishable from
367   // spill slots.
368   int result = index - info()->num_parameters() - 1;
369
370   DCHECK(result < 0);
371   return result;
372 }
373
374
// A parameter relative to ebp in the arguments stub.
// Returns the frame-pointer-relative byte offset of parameter |index|;
// -1 denotes the receiver.
int LChunk::ParameterAt(int index) {
  DCHECK(-1 <= index);  // -1 is the receiver.
  return (1 + info()->scope()->num_parameters() - index) *
      kPointerSize;
}
381
382
// Returns the instruction at |index| as a gap; the cast checks that the
// instruction really is an LGap.
LGap* LChunk::GetGapAt(int index) const {
  return LGap::cast(instructions_[index]);
}
386
387
// True if the instruction at |index| is a gap.
bool LChunk::IsGapAt(int index) const {
  return instructions_[index]->IsGap();
}
391
392
// Walks backwards from |index| to the nearest gap position.  Assumes a
// gap exists at or before |index| (AddInstruction interleaves a gap
// with every instruction).
int LChunk::NearestGapPos(int index) const {
  while (!IsGapAt(index)) index--;
  return index;
}
397
398
// Adds a from->to move to the START parallel move of the gap at
// |index|, creating the parallel move if it does not exist yet.
void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
  GetGapAt(index)->GetOrCreateParallelMove(
      LGap::START, zone())->AddMove(from, to, zone());
}
403
404
// Maps a constant operand back to its HConstant via the graph's value
// table (the operand's index is the hydrogen value id).
HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
  return HConstant::cast(graph_->LookupValue(operand->index()));
}
408
409
// Returns the representation of the hydrogen value backing a constant
// operand.
Representation LChunk::LookupLiteralRepresentation(
    LConstantOperand* operand) const {
  return graph_->LookupValue(operand->index())->representation();
}
414
415
// Registers |code| as weakly depending on |object|: a weak cell for the
// code is inserted into the heap's weak-object-to-code dependency table
// so the code can be invalidated when the object dies.
static void AddWeakObjectToCodeDependency(Isolate* isolate,
                                          Handle<HeapObject> object,
                                          Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
  heap->AddWeakObjectToCodeDependency(object, dep);
}
425
426
// Scans the relocation info of freshly generated optimized |code| for
// weakly-embedded objects and cells, and registers dependencies so the
// code is invalidated if any of them die.
void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {
  DCHECK(code->is_optimized_code());
  ZoneList<Handle<Map> > maps(1, zone());
  ZoneList<Handle<HeapObject> > objects(1, zone());
  // Only embedded-object and cell reloc entries can hold weak references.
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::CELL &&
        code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
      objects.Add(Handle<HeapObject>(it.rinfo()->target_cell()), zone());
    } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
               code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
      // Maps go through their own dependent-code group; everything else
      // uses the generic weak-object-to-code table.
      if (it.rinfo()->target_object()->IsMap()) {
        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
        maps.Add(map, zone());
      } else {
        Handle<HeapObject> object(
            HeapObject::cast(it.rinfo()->target_object()));
        objects.Add(object, zone());
      }
    }
  }
  for (int i = 0; i < maps.length(); i++) {
    Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code);
  }
  for (int i = 0; i < objects.length(); i++) {
    AddWeakObjectToCodeDependency(isolate(), objects.at(i), code);
  }
  if (FLAG_enable_ool_constant_pool) {
    code->constant_pool()->set_weak_object_state(
        ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE);
  }
  code->set_can_have_weak_objects(true);
}
462
463
// Commits the map dependencies collected during graph building to the
// generated |code|: deprecation (transition) and stability (prototype
// check) groups, plus the CompilationInfo's own dependencies and the
// weak objects embedded in the code.  No-op for non-optimized code.
void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
       iend = deprecation_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (MapSet::const_iterator it = stability_dependencies_.begin(),
       iend = stability_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->CommitDependencies(code);
  RegisterWeakObjectsInOptimizedCode(code);
}
487
488
// Builds the Lithium chunk for |graph| and runs register allocation.
// Returns NULL (after aborting the optimization on the CompilationInfo)
// if the graph has too many virtual registers, chunk building fails, or
// register allocation fails.
LChunk* LChunk::NewChunk(HGraph* graph) {
  // Chunk building must not create handles or allocate on the heap.
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  // Remember which double registers were assigned so the chunk can
  // reserve spill slots for them (see set_allocated_double_registers).
  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}
514
515
// Generates machine code for this chunk.  On success the code object is
// finished, dependencies are committed, and line-position events are
// logged; on failure the aborted assembler is discarded and a null
// handle is returned.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  // Thread branches past empty blocks before emitting any code.
  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  // Generation failed: tell the assembler so it can clean up.
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}
550
551
552 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
553   allocated_double_registers_ = allocated_registers;
554   BitVector* doubles = allocated_double_registers();
555   BitVector::Iterator iterator(doubles);
556   while (!iterator.Done()) {
557     if (info()->saves_caller_doubles()) {
558       if (kDoubleSize == kPointerSize * 2) {
559         spill_slot_count_ += 2;
560       } else {
561         spill_slot_count_++;
562       }
563     }
564     iterator.Advance();
565   }
566 }
567
568
// Permanently aborts the current optimization attempt with |reason| and
// marks this builder as aborted.
void LChunkBuilderBase::Abort(BailoutReason reason) {
  info()->AbortOptimization(reason);
  status_ = ABORTED;
}
573
574
// Bails out of the current optimization attempt with |reason| but
// allows it to be retried later; marks this builder as aborted.
void LChunkBuilderBase::Retry(BailoutReason reason) {
  info()->RetryOptimization(reason);
  status_ = ABORTED;
}
579
580
// Translates a hydrogen deopt environment (and, recursively, its outer
// environments) into an LEnvironment.  Values are added in order, with
// materialization markers ("holes") standing in for captured/arguments
// objects, which are then expanded by AddObjectToMaterialize.
// Returns NULL for a NULL |hydrogen_env|.
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  // Outer environments are converted first so 'outer' is ready below.
  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  // Non-JS frames omit the special slots from the value count.
  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      // Placeholder; the object's fields are appended below.
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  // Only JS function frames advance the outgoing argument index.
  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
646
647
648 // Add an object to the supplied environment and object materialization list.
649 //
650 // Notes:
651 //
652 // We are building three lists here:
653 //
654 // 1. In the result->object_mapping_ list (added to by the
655 //    LEnvironment::Add*Object methods), we store the lengths (number
656 //    of fields) of the captured objects in depth-first traversal order, or
657 //    in case of duplicated objects, we store the index to the duplicate object
658 //    (with a tag to differentiate between captured and duplicated objects).
659 //
660 // 2. The object fields are stored in the result->values_ list
661 //    (added to by the LEnvironment.AddValue method) sequentially as lists
662 //    of fields with holes for nested objects (the holes will be expanded
663 //    later by LCodegen::AddToTranslation according to the
664 //    LEnvironment.object_mapping_ list).
665 //
666 // 3. The auxiliary objects_to_materialize array stores the hydrogen values
667 //    in the same order as result->object_mapping_ list. This is used
668 //    to detect duplicate values and calculate the corresponding object index.
// Adds captured object |value| (and, recursively, its nested captured
// objects) to |result| and to the de-duplication list
// |objects_to_materialize|.  See the block comment above for the three
// lists being built.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    // Arguments objects skip operand 0 (the length excludes it below).
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}
717
718
// At the end of the phase's scope, emits the chunk to the hydrogen
// tracer when trace output is enabled for this phase.
LPhase::~LPhase() {
  if (ShouldProduceTraceOutput()) {
    isolate()->GetHTracer()->TraceLithium(name(), chunk_);
  }
}
724
725
726 } }  // namespace v8::internal