deps: update v8 to 4.3.61.21
[platform/upstream/nodejs.git] / deps / v8 / src / lithium.cc
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "src/lithium.h"
6
7 #include "src/v8.h"
8
9 #include "src/scopes.h"
10
11 #if V8_TARGET_ARCH_IA32
12 #include "src/ia32/lithium-ia32.h"  // NOLINT
13 #include "src/ia32/lithium-codegen-ia32.h"  // NOLINT
14 #elif V8_TARGET_ARCH_X64
15 #include "src/x64/lithium-x64.h"  // NOLINT
16 #include "src/x64/lithium-codegen-x64.h"  // NOLINT
17 #elif V8_TARGET_ARCH_ARM
18 #include "src/arm/lithium-arm.h"  // NOLINT
19 #include "src/arm/lithium-codegen-arm.h"  // NOLINT
20 #elif V8_TARGET_ARCH_PPC
21 #include "src/ppc/lithium-ppc.h"          // NOLINT
22 #include "src/ppc/lithium-codegen-ppc.h"  // NOLINT
23 #elif V8_TARGET_ARCH_MIPS
24 #include "src/mips/lithium-mips.h"  // NOLINT
25 #include "src/mips/lithium-codegen-mips.h"  // NOLINT
26 #elif V8_TARGET_ARCH_ARM64
27 #include "src/arm64/lithium-arm64.h"  // NOLINT
28 #include "src/arm64/lithium-codegen-arm64.h"  // NOLINT
29 #elif V8_TARGET_ARCH_MIPS64
30 #include "src/mips64/lithium-mips64.h"  // NOLINT
31 #include "src/mips64/lithium-codegen-mips64.h"  // NOLINT
32 #elif V8_TARGET_ARCH_X87
33 #include "src/x87/lithium-x87.h"  // NOLINT
34 #include "src/x87/lithium-codegen-x87.h"  // NOLINT
35 #else
36 #error "Unknown architecture."
37 #endif
38
39 namespace v8 {
40 namespace internal {
41
42
// Appends a human-readable description of this operand to |stream|.
// Used by the hydrogen/lithium tracing output; the exact format is
// consumed by the trace-file viewers, so keep it stable.
void LOperand::PrintTo(StringStream* stream) {
  LUnallocated* unalloc = NULL;
  switch (kind()) {
    case INVALID:
      stream->Add("(0)");
      break;
    case UNALLOCATED:
      unalloc = LUnallocated::cast(this);
      stream->Add("v%d", unalloc->virtual_register());
      // A fixed-slot operand encodes its slot index directly; every other
      // policy is described by the extended policy below.
      if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
        stream->Add("(=%dS)", unalloc->fixed_slot_index());
        break;
      }
      switch (unalloc->extended_policy()) {
        case LUnallocated::NONE:
          break;
        case LUnallocated::FIXED_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          // Range-check so tracing never indexes past the register name
          // table, even for corrupt operands.
          if (reg_index < 0 ||
              reg_index >= Register::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_reg#%d)", reg_index);
          } else {
            const char* register_name =
                Register::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", register_name);
          }
          break;
        }
        case LUnallocated::FIXED_DOUBLE_REGISTER: {
          int reg_index = unalloc->fixed_register_index();
          if (reg_index < 0 ||
              reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
            stream->Add("(=invalid_double_reg#%d)", reg_index);
          } else {
            const char* double_register_name =
                DoubleRegister::AllocationIndexToString(reg_index);
            stream->Add("(=%s)", double_register_name);
          }
          break;
        }
        case LUnallocated::MUST_HAVE_REGISTER:
          stream->Add("(R)");
          break;
        case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
          stream->Add("(D)");
          break;
        case LUnallocated::WRITABLE_REGISTER:
          stream->Add("(WR)");
          break;
        case LUnallocated::SAME_AS_FIRST_INPUT:
          stream->Add("(1)");
          break;
        case LUnallocated::ANY:
          stream->Add("(-)");
          break;
      }
      break;
    case CONSTANT_OPERAND:
      stream->Add("[constant:%d]", index());
      break;
    case STACK_SLOT:
      stream->Add("[stack:%d]", index());
      break;
    case DOUBLE_STACK_SLOT:
      stream->Add("[double_stack:%d]", index());
      break;
    case REGISTER: {
      int reg_index = index();
      if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
      }
      break;
    }
    case DOUBLE_REGISTER: {
      int reg_index = index();
      if (reg_index < 0 ||
          reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
        stream->Add("(=invalid_double_reg#%d|R)", reg_index);
      } else {
        stream->Add("[%s|R]",
                    DoubleRegister::AllocationIndexToString(reg_index));
      }
      break;
    }
  }
}
131
132
// Per-kind static cache of pre-built operands with small indices
// (0 .. kNumCachedOperands - 1), lazily created by SetUpCache().
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
LSubKindOperand<kOperandKind, kNumCachedOperands>*
LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;


// Allocates and initializes the cache.  Idempotent: a second call is a
// no-op so repeated V8 initialization is safe.
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
  if (cache) return;
  cache = new LSubKindOperand[kNumCachedOperands];
  for (int i = 0; i < kNumCachedOperands; i++) {
    cache[i].ConvertTo(kOperandKind, i);
  }
}


// Frees the cache.  Safe to call even if SetUpCache() never ran
// (delete[] of NULL is a no-op).
template<LOperand::Kind kOperandKind, int kNumCachedOperands>
void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
  delete[] cache;
  cache = NULL;
}
153
154
// Creates the operand cache for every cached operand kind listed in
// LITHIUM_OPERAND_LIST (constant, stack slot, registers, ...).
void LOperand::SetUpCaches() {
#define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
#undef LITHIUM_OPERAND_SETUP
}
160
161
// Destroys the operand caches created by SetUpCaches(); the counterpart
// called during V8 teardown.
void LOperand::TearDownCaches() {
#define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
  LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
#undef LITHIUM_OPERAND_TEARDOWN
}
167
168
169 bool LParallelMove::IsRedundant() const {
170   for (int i = 0; i < move_operands_.length(); ++i) {
171     if (!move_operands_[i].IsRedundant()) return false;
172   }
173   return true;
174 }
175
176
177 void LParallelMove::PrintDataTo(StringStream* stream) const {
178   bool first = true;
179   for (int i = 0; i < move_operands_.length(); ++i) {
180     if (!move_operands_[i].IsEliminated()) {
181       LOperand* source = move_operands_[i].source();
182       LOperand* destination = move_operands_[i].destination();
183       if (!first) stream->Add(" ");
184       first = false;
185       if (source->Equals(destination)) {
186         destination->PrintTo(stream);
187       } else {
188         destination->PrintTo(stream);
189         stream->Add(" = ");
190         source->PrintTo(stream);
191       }
192       stream->Add(";");
193     }
194   }
195 }
196
197
198 void LEnvironment::PrintTo(StringStream* stream) {
199   stream->Add("[id=%d|", ast_id().ToInt());
200   if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
201     stream->Add("deopt_id=%d|", deoptimization_index());
202   }
203   stream->Add("parameters=%d|", parameter_count());
204   stream->Add("arguments_stack_height=%d|", arguments_stack_height());
205   for (int i = 0; i < values_.length(); ++i) {
206     if (i != 0) stream->Add(";");
207     if (values_[i] == NULL) {
208       stream->Add("[hole]");
209     } else {
210       values_[i]->PrintTo(stream);
211     }
212   }
213   stream->Add("]");
214 }
215
216
217 void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
218   // Do not record arguments as pointers.
219   if (op->IsStackSlot() && op->index() < 0) return;
220   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
221   pointer_operands_.Add(op, zone);
222 }
223
224
225 void LPointerMap::RemovePointer(LOperand* op) {
226   // Do not record arguments as pointers.
227   if (op->IsStackSlot() && op->index() < 0) return;
228   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
229   for (int i = 0; i < pointer_operands_.length(); ++i) {
230     if (pointer_operands_[i]->Equals(op)) {
231       pointer_operands_.Remove(i);
232       --i;
233     }
234   }
235 }
236
237
238 void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
239   // Do not record arguments as pointers.
240   if (op->IsStackSlot() && op->index() < 0) return;
241   DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
242   untagged_operands_.Add(op, zone);
243 }
244
245
246 void LPointerMap::PrintTo(StringStream* stream) {
247   stream->Add("{");
248   for (int i = 0; i < pointer_operands_.length(); ++i) {
249     if (i != 0) stream->Add(";");
250     pointer_operands_[i]->PrintTo(stream);
251   }
252   stream->Add("}");
253 }
254
255
256 int StackSlotOffset(int index) {
257   if (index >= 0) {
258     // Local or spill slot. Skip the frame pointer, function, and
259     // context in the fixed part of the frame.
260     return -(index + 1) * kPointerSize -
261         StandardFrameConstants::kFixedFrameSizeFromFp;
262   } else {
263     // Incoming parameter. Skip the return address.
264     return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
265   }
266 }
267
268
// The initial list capacities (32 instructions, 8 pointer maps, 1 inlined
// closure) are only growth heuristics; all lists are zone-allocated.
LChunk::LChunk(CompilationInfo* info, HGraph* graph)
    : spill_slot_count_(0),
      info_(info),
      graph_(graph),
      instructions_(32, info->zone()),
      pointer_maps_(8, info->zone()),
      inlined_closures_(1, info->zone()),
      deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
      stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}
278
279
280 LLabel* LChunk::GetLabel(int block_id) const {
281   HBasicBlock* block = graph_->blocks()->at(block_id);
282   int first_instruction = block->first_instruction_index();
283   return LLabel::cast(instructions_[first_instruction]);
284 }
285
286
287 int LChunk::LookupDestination(int block_id) const {
288   LLabel* cur = GetLabel(block_id);
289   while (cur->replacement() != NULL) {
290     cur = cur->replacement();
291   }
292   return cur->block_id();
293 }
294
295 Label* LChunk::GetAssemblyLabel(int block_id) const {
296   LLabel* label = GetLabel(block_id);
297   DCHECK(!label->HasReplacement());
298   return label->label();
299 }
300
301
302 void LChunk::MarkEmptyBlocks() {
303   LPhase phase("L_Mark empty blocks", this);
304   for (int i = 0; i < graph()->blocks()->length(); ++i) {
305     HBasicBlock* block = graph()->blocks()->at(i);
306     int first = block->first_instruction_index();
307     int last = block->last_instruction_index();
308     LInstruction* first_instr = instructions()->at(first);
309     LInstruction* last_instr = instructions()->at(last);
310
311     LLabel* label = LLabel::cast(first_instr);
312     if (last_instr->IsGoto()) {
313       LGoto* goto_instr = LGoto::cast(last_instr);
314       if (label->IsRedundant() &&
315           !label->is_loop_header()) {
316         bool can_eliminate = true;
317         for (int i = first + 1; i < last && can_eliminate; ++i) {
318           LInstruction* cur = instructions()->at(i);
319           if (cur->IsGap()) {
320             LGap* gap = LGap::cast(cur);
321             if (!gap->IsRedundant()) {
322               can_eliminate = false;
323             }
324           } else {
325             can_eliminate = false;
326           }
327         }
328         if (can_eliminate) {
329           label->set_replacement(GetLabel(goto_instr->block_id()));
330         }
331       }
332     }
333   }
334 }
335
336
// Appends |instr| to the instruction stream, paired with a gap instruction
// the register allocator later uses for parallel moves.  For control
// instructions the gap is emitted *before* the instruction (nothing may
// follow a control instruction in its block); otherwise it follows.
void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
  LInstructionGap* gap = new (zone()) LInstructionGap(block);
  gap->set_hydrogen_value(instr->hydrogen_value());
  // |index| records the position of |instr| itself (not the gap); it is
  // stored on the pointer map below.
  int index = -1;
  if (instr->IsControl()) {
    instructions_.Add(gap, zone());
    index = instructions_.length();
    instructions_.Add(instr, zone());
  } else {
    index = instructions_.length();
    instructions_.Add(instr, zone());
    instructions_.Add(gap, zone());
  }
  if (instr->HasPointerMap()) {
    pointer_maps_.Add(instr->pointer_map(), zone());
    instr->pointer_map()->set_lithium_position(index);
  }
}
355
356
357 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
358   return LConstantOperand::Create(constant->id(), zone());
359 }
360
361
362 int LChunk::GetParameterStackSlot(int index) const {
363   // The receiver is at index 0, the first parameter at index 1, so we
364   // shift all parameter indexes down by the number of parameters, and
365   // make sure they end up negative so they are distinguishable from
366   // spill slots.
367   int result = index - info()->num_parameters() - 1;
368
369   DCHECK(result < 0);
370   return result;
371 }
372
373
374 // A parameter relative to ebp in the arguments stub.
375 int LChunk::ParameterAt(int index) {
376   DCHECK(-1 <= index);  // -1 is the receiver.
377   return (1 + info()->scope()->num_parameters() - index) *
378       kPointerSize;
379 }
380
381
382 LGap* LChunk::GetGapAt(int index) const {
383   return LGap::cast(instructions_[index]);
384 }
385
386
387 bool LChunk::IsGapAt(int index) const {
388   return instructions_[index]->IsGap();
389 }
390
391
392 int LChunk::NearestGapPos(int index) const {
393   while (!IsGapAt(index)) index--;
394   return index;
395 }
396
397
398 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
399   GetGapAt(index)->GetOrCreateParallelMove(
400       LGap::START, zone())->AddMove(from, to, zone());
401 }
402
403
404 HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
405   return HConstant::cast(graph_->LookupValue(operand->index()));
406 }
407
408
409 Representation LChunk::LookupLiteralRepresentation(
410     LConstantOperand* operand) const {
411   return graph_->LookupValue(operand->index())->representation();
412 }
413
414
// Records in the heap's weak-object-to-code table that |code| (referenced
// through a weak cell so it does not keep itself alive) depends on
// |object|: when |object| dies, the code must be deoptimized.
static void AddWeakObjectToCodeDependency(Isolate* isolate,
                                          Handle<HeapObject> object,
                                          Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
  Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
  dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
  heap->AddWeakObjectToCodeDependency(object, dep);
}
424
425
// Walks the relocation info of |code| for weakly-embedded heap objects and
// registers dependencies so the code is deoptimized when any of them dies.
// Maps get their own dependent-code group; all other objects go through
// the heap's weak-object-to-code table.
void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {
  DCHECK(code->is_optimized_code());
  ZoneList<Handle<Map> > maps(1, zone());
  ZoneList<Handle<HeapObject> > objects(1, zone());
  // Only embedded-object and cell relocation entries can reference weak
  // objects.
  int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::CELL);
  for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
    RelocInfo::Mode mode = it.rinfo()->rmode();
    if (mode == RelocInfo::CELL &&
        code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
      objects.Add(Handle<HeapObject>(it.rinfo()->target_cell()), zone());
    } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
               code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
      if (it.rinfo()->target_object()->IsMap()) {
        Handle<Map> map(Map::cast(it.rinfo()->target_object()));
        maps.Add(map, zone());
      } else {
        Handle<HeapObject> object(
            HeapObject::cast(it.rinfo()->target_object()));
        objects.Add(object, zone());
      }
    }
  }
  for (int i = 0; i < maps.length(); i++) {
    // First dependent code for this map: also retain the map for a while
    // so it is not collected immediately.
    if (maps.at(i)->dependent_code()->number_of_entries(
            DependentCode::kWeakCodeGroup) == 0) {
      isolate()->heap()->AddRetainedMap(maps.at(i));
    }
    Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code);
  }
  for (int i = 0; i < objects.length(); i++) {
    AddWeakObjectToCodeDependency(isolate(), objects.at(i), code);
  }
  if (FLAG_enable_ool_constant_pool) {
    code->constant_pool()->set_weak_object_state(
        ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE);
  }
  code->set_can_have_weak_objects(true);
}
465
466
// Installs the map dependencies collected while building this chunk on
// |code|, commits the compilation info's dependencies, and registers the
// weak objects embedded in the generated code.  No-op for non-optimized
// code.
void LChunk::CommitDependencies(Handle<Code> code) const {
  if (!code->is_optimized_code()) return;
  HandleScope scope(isolate());

  for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
       iend = deprecation_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(!map->is_deprecated());
    DCHECK(map->CanBeDeprecated());
    // Deoptimize when the map is deprecated (transitions away).
    Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
  }

  for (MapSet::const_iterator it = stability_dependencies_.begin(),
       iend = stability_dependencies_.end(); it != iend; ++it) {
    Handle<Map> map = *it;
    DCHECK(map->is_stable());
    DCHECK(map->CanTransition());
    // Deoptimize when the map loses stability.
    Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
  }

  info_->CommitDependencies(code);
  RegisterWeakObjectsInOptimizedCode(code);
}
490
491
// Builds a lithium chunk from |graph| and runs register allocation on it.
// Returns NULL — after flagging the appropriate bailout on the compilation
// info — when the graph needs too many virtual registers or allocation
// fails.  No heap or handle allocation may happen during lowering.
LChunk* LChunk::NewChunk(HGraph* graph) {
  DisallowHandleAllocation no_handles;
  DisallowHeapAllocation no_gc;
  graph->DisallowAddingNewValues();
  int values = graph->GetMaximumValueID();
  CompilationInfo* info = graph->info();
  if (values > LUnallocated::kMaxVirtualRegisters) {
    info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
    return NULL;
  }
  LAllocator allocator(values, graph);
  LChunkBuilder builder(info, graph, &allocator);
  LChunk* chunk = builder.Build();
  if (chunk == NULL) return NULL;

  if (!allocator.Allocate(chunk)) {
    info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
    return NULL;
  }

  // Remember which double registers were used so spill slots can be
  // reserved for them (see set_allocated_double_registers).
  chunk->set_allocated_double_registers(
      allocator.assigned_double_registers());

  return chunk;
}
517
518
// Generates machine code for this chunk.  Returns the new code object, or
// a null handle when code generation aborted.
Handle<Code> LChunk::Codegen() {
  MacroAssembler assembler(info()->isolate(), NULL, 0);
  LOG_CODE_EVENT(info()->isolate(),
                 CodeStartLinePosInfoRecordEvent(
                     assembler.positions_recorder()));
  // Code serializer only takes unoptimized code.
  DCHECK(!info()->will_serialize());
  LCodeGen generator(this, &assembler, info());

  // Thread jumps through empty blocks before emitting anything.
  MarkEmptyBlocks();

  if (generator.GenerateCode()) {
    generator.CheckEnvironmentUsage();
    CodeGenerator::MakeCodePrologue(info(), "optimized");
    Code::Flags flags = info()->flags();
    Handle<Code> code =
        CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
    generator.FinishCode(code);
    CommitDependencies(code);
    code->set_is_crankshafted(true);
    // Hand the accumulated source-position data to the JIT code logger.
    void* jit_handler_data =
        assembler.positions_recorder()->DetachJITHandlerData();
    LOG_CODE_EVENT(info()->isolate(),
                   CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));

    CodeGenerator::PrintCode(code, info());
    DCHECK(!(info()->isolate()->serializer_enabled() &&
             info()->GetMustNotHaveEagerFrame() &&
             generator.NeedsEagerFrame()));
    return code;
  }
  // Tell the assembler its buffer will not be used so it can clean up.
  assembler.AbortedCodeGeneration();
  return Handle<Code>::null();
}
553
554
555 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
556   allocated_double_registers_ = allocated_registers;
557   BitVector* doubles = allocated_double_registers();
558   BitVector::Iterator iterator(doubles);
559   while (!iterator.Done()) {
560     if (info()->saves_caller_doubles()) {
561       if (kDoubleSize == kPointerSize * 2) {
562         spill_slot_count_ += 2;
563       } else {
564         spill_slot_count_++;
565       }
566     }
567     iterator.Advance();
568   }
569 }
570
571
572 void LChunkBuilderBase::Abort(BailoutReason reason) {
573   info()->AbortOptimization(reason);
574   status_ = ABORTED;
575 }
576
577
578 void LChunkBuilderBase::Retry(BailoutReason reason) {
579   info()->RetryOptimization(reason);
580   status_ = ABORTED;
581 }
582
583
// Recursively translates a hydrogen environment chain (outermost frame
// first) into the LEnvironment chain used for deoptimization.
// |argument_index_accumulator| carries the running outgoing-argument index
// across inlined JS frames; |objects_to_materialize| de-duplicates
// captured objects (see AddObjectToMaterialize below).
LEnvironment* LChunkBuilderBase::CreateEnvironment(
    HEnvironment* hydrogen_env, int* argument_index_accumulator,
    ZoneList<HValue*>* objects_to_materialize) {
  if (hydrogen_env == NULL) return NULL;

  // Build the outer frames first so they can be linked in below.
  LEnvironment* outer =
      CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
                        objects_to_materialize);
  BailoutId ast_id = hydrogen_env->ast_id();
  DCHECK(!ast_id.IsNone() ||
         hydrogen_env->frame_type() != JS_FUNCTION);

  // Non-JS frames (e.g. adaptor frames) omit the "special" slots from the
  // value count; JS frames keep them.
  int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
                          ? 0
                          : hydrogen_env->specials_count();

  int value_count = hydrogen_env->length() - omitted_count;
  LEnvironment* result =
      new(zone()) LEnvironment(hydrogen_env->closure(),
                               hydrogen_env->frame_type(),
                               ast_id,
                               hydrogen_env->parameter_count(),
                               argument_count_,
                               value_count,
                               outer,
                               hydrogen_env->entry(),
                               zone());
  int argument_index = *argument_index_accumulator;

  // Store the environment description into the environment
  // (with holes for nested objects)
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i) &&
        hydrogen_env->frame_type() != JS_FUNCTION) {
      continue;
    }
    LOperand* op;
    HValue* value = hydrogen_env->values()->at(i);
    CHECK(!value->IsPushArguments());  // Do not deopt outgoing arguments
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      // Placeholder; the object's fields are appended in the second pass.
      op = LEnvironment::materialization_marker();
    } else {
      op = UseAny(value);
    }
    result->AddValue(op,
                     value->representation(),
                     value->CheckFlag(HInstruction::kUint32));
  }

  // Recursively store the nested objects into the environment
  for (int i = 0; i < hydrogen_env->length(); ++i) {
    if (hydrogen_env->is_special_index(i)) continue;

    HValue* value = hydrogen_env->values()->at(i);
    if (value->IsArgumentsObject() || value->IsCapturedObject()) {
      AddObjectToMaterialize(value, objects_to_materialize, result);
    }
  }

  // Only JS frames consume outgoing arguments; propagate the updated
  // index back to the caller for those.
  if (hydrogen_env->frame_type() == JS_FUNCTION) {
    *argument_index_accumulator = argument_index;
  }

  return result;
}
649
650
651 // Add an object to the supplied environment and object materialization list.
652 //
653 // Notes:
654 //
655 // We are building three lists here:
656 //
657 // 1. In the result->object_mapping_ list (added to by the
658 //    LEnvironment::Add*Object methods), we store the lengths (number
659 //    of fields) of the captured objects in depth-first traversal order, or
660 //    in case of duplicated objects, we store the index to the duplicate object
661 //    (with a tag to differentiate between captured and duplicated objects).
662 //
663 // 2. The object fields are stored in the result->values_ list
664 //    (added to by the LEnvironment.AddValue method) sequentially as lists
665 //    of fields with holes for nested objects (the holes will be expanded
666 //    later by LCodegen::AddToTranslation according to the
667 //    LEnvironment.object_mapping_ list).
668 //
669 // 3. The auxiliary objects_to_materialize array stores the hydrogen values
670 //    in the same order as result->object_mapping_ list. This is used
671 //    to detect duplicate values and calculate the corresponding object index.
// (See the three-list description immediately above.)  |value| is the
// captured/arguments object being added, |objects_to_materialize| the
// de-duplication list, and |result| the environment being filled in.
void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
    ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
  int object_index = objects_to_materialize->length();
  // Store the hydrogen value into the de-duplication array
  objects_to_materialize->Add(value, zone());
  // Find out whether we are storing a duplicated value
  int previously_materialized_object = -1;
  for (int prev = 0; prev < object_index; ++prev) {
    if (objects_to_materialize->at(prev) == value) {
      previously_materialized_object = prev;
      break;
    }
  }
  // Store the captured object length (or duplicated object index)
  // into the environment. For duplicated objects, we stop here.
  int length = value->OperandCount();
  bool is_arguments = value->IsArgumentsObject();
  if (previously_materialized_object >= 0) {
    result->AddDuplicateObject(previously_materialized_object);
    return;
  } else {
    // For arguments objects the first operand is skipped, hence length - 1.
    result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
  }
  // Store the captured object's fields into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    LOperand* op;
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      // Insert a hole for nested objects
      op = LEnvironment::materialization_marker();
    } else {
      DCHECK(!arg_value->IsPushArguments());
      // For ordinary values, tell the register allocator we need the value
      // to be alive here
      op = UseAny(arg_value);
    }
    result->AddValue(op,
                     arg_value->representation(),
                     arg_value->CheckFlag(HInstruction::kUint32));
  }
  // Recursively store all the nested captured objects into the environment
  for (int i = is_arguments ? 1 : 0; i < length; ++i) {
    HValue* arg_value = value->OperandAt(i);
    if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
      AddObjectToMaterialize(arg_value, objects_to_materialize, result);
    }
  }
}
720
721
722 LPhase::~LPhase() {
723   if (ShouldProduceTraceOutput()) {
724     isolate()->GetHTracer()->TraceLithium(name(), chunk_);
725   }
726 }
727
728
729 } }  // namespace v8::internal