1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/lithium.h"
9 #include "src/scopes.h"
11 #if V8_TARGET_ARCH_IA32
12 #include "src/ia32/lithium-ia32.h" // NOLINT
13 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT
14 #elif V8_TARGET_ARCH_X64
15 #include "src/x64/lithium-x64.h" // NOLINT
16 #include "src/x64/lithium-codegen-x64.h" // NOLINT
17 #elif V8_TARGET_ARCH_ARM
18 #include "src/arm/lithium-arm.h" // NOLINT
19 #include "src/arm/lithium-codegen-arm.h" // NOLINT
20 #elif V8_TARGET_ARCH_PPC
21 #include "src/ppc/lithium-ppc.h" // NOLINT
22 #include "src/ppc/lithium-codegen-ppc.h" // NOLINT
23 #elif V8_TARGET_ARCH_MIPS
24 #include "src/mips/lithium-mips.h" // NOLINT
25 #include "src/mips/lithium-codegen-mips.h" // NOLINT
26 #elif V8_TARGET_ARCH_ARM64
27 #include "src/arm64/lithium-arm64.h" // NOLINT
28 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT
29 #elif V8_TARGET_ARCH_MIPS64
30 #include "src/mips64/lithium-mips64.h" // NOLINT
31 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT
32 #elif V8_TARGET_ARCH_X87
33 #include "src/x87/lithium-x87.h" // NOLINT
34 #include "src/x87/lithium-codegen-x87.h" // NOLINT
36 #error "Unknown architecture."
// Pretty-prints this operand to |stream| for debug/tracing output.
// Dispatches on the operand kind; UNALLOCATED operands are further decoded
// by their allocation policy (fixed slot, fixed register, etc.).
// NOTE(review): this listing is truncated (several case labels, breaks and
// closing braces are elided) -- confirm against the complete file.
43 void LOperand::PrintTo(StringStream* stream) {
44 LUnallocated* unalloc = NULL;
// Unallocated operand: print virtual register id plus policy annotation.
50 unalloc = LUnallocated::cast(this);
51 stream->Add("v%d", unalloc->virtual_register());
52 if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
// "(=<n>S)" marks a fixed stack-slot assignment.
53 stream->Add("(=%dS)", unalloc->fixed_slot_index());
56 switch (unalloc->extended_policy()) {
57 case LUnallocated::NONE:
58 case LUnallocated::FIXED_REGISTER: {
// Guard against out-of-range register indices before converting
// the allocation index to a printable register name.
60 int reg_index = unalloc->fixed_register_index();
62 reg_index >= Register::kMaxNumAllocatableRegisters) {
63 stream->Add("(=invalid_reg#%d)", reg_index);
65 const char* register_name =
66 Register::AllocationIndexToString(reg_index);
67 stream->Add("(=%s)", register_name);
// Same shape as FIXED_REGISTER, but for the double-register file.
71 case LUnallocated::FIXED_DOUBLE_REGISTER: {
72 int reg_index = unalloc->fixed_register_index();
74 reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
75 stream->Add("(=invalid_double_reg#%d)", reg_index);
77 const char* double_register_name =
78 DoubleRegister::AllocationIndexToString(reg_index);
79 stream->Add("(=%s)", double_register_name);
// Remaining policies are printed as short tags (bodies elided here).
83 case LUnallocated::MUST_HAVE_REGISTER:
86 case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
89 case LUnallocated::WRITABLE_REGISTER:
92 case LUnallocated::SAME_AS_FIRST_INPUT:
95 case LUnallocated::ANY:
// Allocated operand kinds: print kind tag plus index().
100 case CONSTANT_OPERAND:
101 stream->Add("[constant:%d]", index());
104 stream->Add("[stack:%d]", index());
106 case DOUBLE_STACK_SLOT:
107 stream->Add("[double_stack:%d]", index());
// REGISTER / DOUBLE_REGISTER: index() is an allocation index; validate
// before mapping it to a register name. "|R" marks a register operand.
110 int reg_index = index();
111 if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
112 stream->Add("(=invalid_reg#%d|R)", reg_index);
114 stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
118 case DOUBLE_REGISTER: {
119 int reg_index = index();
121 reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
122 stream->Add("(=invalid_double_reg#%d|R)", reg_index);
124 stream->Add("[%s|R]",
125 DoubleRegister::AllocationIndexToString(reg_index));
// Out-of-class definition of the per-kind static cache of pre-built
// operands. NULL until SetUpCache() allocates it.
133 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
134 LSubKindOperand<kOperandKind, kNumCachedOperands>*
135 LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
// Allocates the cache array for this operand kind and initializes entry i
// to represent (kOperandKind, index i) via ConvertTo().
// NOTE(review): closing braces of the loop/function are elided in this view.
138 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
139 void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
141 cache = new LSubKindOperand[kNumCachedOperands];
142 for (int i = 0; i < kNumCachedOperands; i++) {
143 cache[i].ConvertTo(kOperandKind, i);
// Counterpart of SetUpCache(); releases the cache array (body elided in
// this view -- presumably delete[]s |cache|; confirm against the full file).
148 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
149 void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
// Sets up the operand caches for every kind listed in LITHIUM_OPERAND_LIST
// by expanding to L<name>::SetUpCache() per entry.
155 void LOperand::SetUpCaches() {
156 #define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
157 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
158 #undef LITHIUM_OPERAND_SETUP
// Tears down the operand caches for every kind in LITHIUM_OPERAND_LIST;
// mirror of SetUpCaches().
162 void LOperand::TearDownCaches() {
163 #define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
164 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
165 #undef LITHIUM_OPERAND_TEARDOWN
// A parallel move is redundant iff every contained move is redundant.
// Returns false at the first non-redundant move (the trailing
// "return true" is elided in this view).
169 bool LParallelMove::IsRedundant() const {
170 for (int i = 0; i < move_operands_.length(); ++i) {
171 if (!move_operands_[i].IsRedundant()) return false;
// Prints all non-eliminated moves, space-separated. A move whose source
// equals its destination prints just the destination; otherwise it prints
// "destination = source" style output. NOTE(review): the declaration of
// |first| and the "= " separator lines are elided in this view.
177 void LParallelMove::PrintDataTo(StringStream* stream) const {
179 for (int i = 0; i < move_operands_.length(); ++i) {
180 if (!move_operands_[i].IsEliminated()) {
181 LOperand* source = move_operands_[i].source();
182 LOperand* destination = move_operands_[i].destination();
183 if (!first) stream->Add(" ");
185 if (source->Equals(destination)) {
186 destination->PrintTo(stream);
188 destination->PrintTo(stream);
190 source->PrintTo(stream);
// Prints a deopt environment: ast id, optional deopt id, parameter count,
// arguments stack height, then the semicolon-separated value list
// ("[hole]" for missing values).
198 void LEnvironment::PrintTo(StringStream* stream) {
199 stream->Add("[id=%d|", ast_id().ToInt());
200 if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
201 stream->Add("deopt_id=%d|", deoptimization_index());
203 stream->Add("parameters=%d|", parameter_count());
204 stream->Add("arguments_stack_height=%d|", arguments_stack_height());
205 for (int i = 0; i < values_.length(); ++i) {
206 if (i != 0) stream->Add(";");
207 if (values_[i] == NULL) {
208 stream->Add("[hole]");
210 values_[i]->PrintTo(stream);
// Records |op| as holding a tagged pointer at this safepoint.
// Negative stack-slot indices are incoming arguments and are skipped;
// doubles can never hold tagged values (DCHECK).
217 void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
218 // Do not record arguments as pointers.
219 if (op->IsStackSlot() && op->index() < 0) return;
220 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
221 pointer_operands_.Add(op, zone);
// Removes a previously recorded pointer operand (linear search by
// Equals). Mirrors the argument/double filtering of RecordPointer.
225 void LPointerMap::RemovePointer(LOperand* op) {
226 // Do not record arguments as pointers.
227 if (op->IsStackSlot() && op->index() < 0) return;
228 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
229 for (int i = 0; i < pointer_operands_.length(); ++i) {
230 if (pointer_operands_[i]->Equals(op)) {
231 pointer_operands_.Remove(i);
// Records |op| as holding an untagged (raw) value at this safepoint;
// same argument/double filtering as RecordPointer, different target list.
238 void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
239 // Do not record arguments as pointers.
240 if (op->IsStackSlot() && op->index() < 0) return;
241 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot());
242 untagged_operands_.Add(op, zone);
// Prints the recorded pointer operands, semicolon-separated, for
// debugging output.
246 void LPointerMap::PrintTo(StringStream* stream) {
248 for (int i = 0; i < pointer_operands_.length(); ++i) {
249 if (i != 0) stream->Add(";");
250 pointer_operands_[i]->PrintTo(stream);
// Translates a lithium stack-slot index into a byte offset relative to
// the frame pointer. Non-negative indices are locals/spill slots below
// the fixed frame; negative indices address incoming parameters above
// the saved fp/return address. NOTE(review): the "if (index >= 0)"/"else"
// lines selecting between the two returns are elided in this view.
256 int StackSlotOffset(int index) {
258 // Local or spill slot. Skip the frame pointer, function, and
259 // context in the fixed part of the frame.
260 return -(index + 1) * kPointerSize -
261 StandardFrameConstants::kFixedFrameSizeFromFp;
263 // Incoming parameter. Skip the return address.
264 return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
// Constructs an empty chunk for |graph|; all lists are zone-allocated in
// the compilation info's zone with small initial capacities.
269 LChunk::LChunk(CompilationInfo* info, HGraph* graph)
270 : spill_slot_count_(0),
273 instructions_(32, info->zone()),
274 pointer_maps_(8, info->zone()),
275 inlined_closures_(1, info->zone()),
276 deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
277 stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}
// Returns the LLabel that starts basic block |block_id|; the first
// instruction of every block is expected to be an LLabel (cast checks).
280 LLabel* LChunk::GetLabel(int block_id) const {
281 HBasicBlock* block = graph_->blocks()->at(block_id);
282 int first_instruction = block->first_instruction_index();
283 return LLabel::cast(instructions_[first_instruction]);
// Follows the chain of label replacements (set up by MarkEmptyBlocks)
// and returns the id of the final, non-replaced destination block.
287 int LChunk::LookupDestination(int block_id) const {
288 LLabel* cur = GetLabel(block_id);
289 while (cur->replacement() != NULL) {
290 cur = cur->replacement();
292 return cur->block_id();
// Returns the assembler Label for |block_id|. Callers must have resolved
// replacements first (DCHECK enforces no pending replacement).
295 Label* LChunk::GetAssemblyLabel(int block_id) const {
296 LLabel* label = GetLabel(block_id);
297 DCHECK(!label->HasReplacement());
298 return label->label();
// Finds blocks that consist only of a redundant label, redundant gaps and
// a final goto, and marks their label as replaced by the goto target so
// jumps can bypass them. Loop headers are never eliminated.
// NOTE(review): several closing braces and an else-branch are elided in
// this view; the inner loop index |i| shadows the outer |i| by design in
// the original file.
302 void LChunk::MarkEmptyBlocks() {
303 LPhase phase("L_Mark empty blocks", this);
304 for (int i = 0; i < graph()->blocks()->length(); ++i) {
305 HBasicBlock* block = graph()->blocks()->at(i);
306 int first = block->first_instruction_index();
307 int last = block->last_instruction_index();
308 LInstruction* first_instr = instructions()->at(first);
309 LInstruction* last_instr = instructions()->at(last);
311 LLabel* label = LLabel::cast(first_instr);
312 if (last_instr->IsGoto()) {
313 LGoto* goto_instr = LGoto::cast(last_instr);
314 if (label->IsRedundant() &&
315 !label->is_loop_header()) {
// The block is eliminable only if everything between the label and
// the goto is a redundant gap.
316 bool can_eliminate = true;
317 for (int i = first + 1; i < last && can_eliminate; ++i) {
318 LInstruction* cur = instructions()->at(i);
320 LGap* gap = LGap::cast(cur);
321 if (!gap->IsRedundant()) {
322 can_eliminate = false;
325 can_eliminate = false;
// Redirect this block's label to the goto's destination.
329 label->set_replacement(GetLabel(goto_instr->block_id()));
// Appends |instr| to the instruction stream together with a gap that
// carries the same hydrogen value. Control instructions get the gap
// BEFORE them; all others get it AFTER, so the instruction's own index
// differs per case. NOTE(review): the declaration of |index| (original
// line 340) is elided in this view.
337 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
338 LInstructionGap* gap = new (zone()) LInstructionGap(block);
339 gap->set_hydrogen_value(instr->hydrogen_value());
341 if (instr->IsControl()) {
342 instructions_.Add(gap, zone());
343 index = instructions_.length();
344 instructions_.Add(instr, zone());
346 index = instructions_.length();
347 instructions_.Add(instr, zone());
348 instructions_.Add(gap, zone());
// Remember where the pointer map lives in the lithium stream so the
// safepoint can later be associated with this position.
350 if (instr->HasPointerMap()) {
351 pointer_maps_.Add(instr->pointer_map(), zone());
352 instr->pointer_map()->set_lithium_position(index);
// Creates (or fetches from the cache) the constant operand whose index is
// the HConstant's value id.
357 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
358 return LConstantOperand::Create(constant->id(), zone());
// Maps a parameter index (receiver = 0) to its negative stack-slot index;
// negative indices distinguish parameters from spill slots.
// NOTE(review): the DCHECK/return lines after the computation are elided
// in this view.
362 int LChunk::GetParameterStackSlot(int index) const {
363 // The receiver is at index 0, the first parameter at index 1, so we
364 // shift all parameter indexes down by the number of parameters, and
365 // make sure they end up negative so they are distinguishable from
367 int result = index - info()->num_parameters() - 1;
374 // A parameter relative to ebp in the arguments stub.
// Returns the fp-relative offset of parameter |index| (-1 = receiver).
// NOTE(review): the multiplier operand (presumably kPointerSize) on the
// return expression is elided in this view.
375 int LChunk::ParameterAt(int index) {
376 DCHECK(-1 <= index); // -1 is the receiver.
377 return (1 + info()->scope()->num_parameters() - index) *
// Returns the instruction at |index| as an LGap (cast checks the kind).
382 LGap* LChunk::GetGapAt(int index) const {
383 return LGap::cast(instructions_[index]);
// True if the instruction at |index| is a gap.
387 bool LChunk::IsGapAt(int index) const {
388 return instructions_[index]->IsGap();
// Walks backwards from |index| to the nearest gap position (the return of
// the final index is elided in this view).
392 int LChunk::NearestGapPos(int index) const {
393 while (!IsGapAt(index)) index--;
// Adds a from->to move to the START parallel move of the gap at |index|,
// creating the parallel move on demand.
398 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
399 GetGapAt(index)->GetOrCreateParallelMove(
400 LGap::START, zone())->AddMove(from, to, zone());
// Resolves a constant operand back to its HConstant via the graph's
// value table (operand index == hydrogen value id).
404 HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
405 return HConstant::cast(graph_->LookupValue(operand->index()));
// Returns the hydrogen representation of the value a constant operand
// refers to.
409 Representation LChunk::LookupLiteralRepresentation(
410 LConstantOperand* operand) const {
411 return graph_->LookupValue(operand->index())->representation();
// Registers |code| (via a weak cell) in the heap's weak-object-to-code
// dependency table keyed by |object|, so the code can be deoptimized when
// the object dies. NOTE(review): the Handle<Code> parameter line is
// elided in this view.
415 static void AddWeakObjectToCodeDependency(Isolate* isolate,
416 Handle<HeapObject> object,
418 Handle<WeakCell> cell = Code::WeakCellFor(code);
419 Heap* heap = isolate->heap();
420 Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
421 dep = DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
422 heap->AddWeakObjectToCodeDependency(object, dep);
// Scans |code|'s relocation info for embedded objects and cells that are
// held weakly, then wires up the dependencies: maps get dependent-code
// entries (and retention while unused), other heap objects go through the
// weak-object-to-code table. Finally marks the code as containing weak
// objects.
426 void LChunk::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) const {
427 DCHECK(code->is_optimized_code());
428 ZoneList<Handle<Map> > maps(1, zone());
429 ZoneList<Handle<HeapObject> > objects(1, zone());
// Only object-embedding and cell reloc entries can reference weak objects.
430 int mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
431 RelocInfo::ModeMask(RelocInfo::CELL);
432 for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
433 RelocInfo::Mode mode = it.rinfo()->rmode();
434 if (mode == RelocInfo::CELL &&
435 code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
436 objects.Add(Handle<HeapObject>(it.rinfo()->target_cell()), zone());
437 } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
438 code->IsWeakObjectInOptimizedCode(it.rinfo()->target_object())) {
// Maps are collected separately: they use dependent-code groups
// rather than the weak-object table.
439 if (it.rinfo()->target_object()->IsMap()) {
440 Handle<Map> map(Map::cast(it.rinfo()->target_object()));
441 maps.Add(map, zone());
443 Handle<HeapObject> object(
444 HeapObject::cast(it.rinfo()->target_object()));
445 objects.Add(object, zone());
// A map with no dependent code yet must be kept alive explicitly until
// this dependency is registered.
449 for (int i = 0; i < maps.length(); i++) {
450 if (maps.at(i)->dependent_code()->number_of_entries(
451 DependentCode::kWeakCodeGroup) == 0) {
452 isolate()->heap()->AddRetainedMap(maps.at(i));
454 Map::AddDependentCode(maps.at(i), DependentCode::kWeakCodeGroup, code);
456 for (int i = 0; i < objects.length(); i++) {
457 AddWeakObjectToCodeDependency(isolate(), objects.at(i), code);
// With an out-of-line constant pool, the pool itself must also be flagged
// as holding weak objects.
459 if (FLAG_enable_ool_constant_pool) {
460 code->constant_pool()->set_weak_object_state(
461 ConstantPoolArray::WEAK_OBJECTS_IN_OPTIMIZED_CODE);
463 code->set_can_have_weak_objects(true);
// Commits the map dependencies accumulated during lithium building:
// deprecation dependencies invalidate |code| when a map transitions, and
// stability dependencies invalidate it when a stable map's prototype
// chain changes. No-op for non-optimized code.
467 void LChunk::CommitDependencies(Handle<Code> code) const {
468 if (!code->is_optimized_code()) return;
469 HandleScope scope(isolate());
471 for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
472 iend = deprecation_dependencies_.end(); it != iend; ++it) {
473 Handle<Map> map = *it;
474 DCHECK(!map->is_deprecated());
475 DCHECK(map->CanBeDeprecated());
476 Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
479 for (MapSet::const_iterator it = stability_dependencies_.begin(),
480 iend = stability_dependencies_.end(); it != iend; ++it) {
481 Handle<Map> map = *it;
482 DCHECK(map->is_stable());
483 DCHECK(map->CanTransition());
484 Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
// Compilation-info-level dependencies plus the weak-object scan complete
// the commit.
487 info_->CommitDependencies(code);
488 RegisterWeakObjectsInOptimizedCode(code);
// Builds a lithium chunk from |graph|: freezes the graph, checks the
// virtual-register budget, runs the chunk builder and the register
// allocator. Returns NULL on any failure (optimization is aborted with a
// bailout reason). NOTE(review): the "return NULL" lines after the abort
// calls and the final "return chunk" are elided in this view.
492 LChunk* LChunk::NewChunk(HGraph* graph) {
// No handle/heap allocation may happen during lithium building.
493 DisallowHandleAllocation no_handles;
494 DisallowHeapAllocation no_gc;
495 graph->DisallowAddingNewValues();
496 int values = graph->GetMaximumValueID();
497 CompilationInfo* info = graph->info();
498 if (values > LUnallocated::kMaxVirtualRegisters) {
499 info->AbortOptimization(kNotEnoughVirtualRegistersForValues);
502 LAllocator allocator(values, graph);
503 LChunkBuilder builder(info, graph, &allocator);
504 LChunk* chunk = builder.Build();
505 if (chunk == NULL) return NULL;
507 if (!allocator.Allocate(chunk)) {
508 info->AbortOptimization(kNotEnoughVirtualRegistersRegalloc);
512 chunk->set_allocated_double_registers(
513 allocator.assigned_double_registers());
// Runs the architecture-specific code generator over this chunk and turns
// the result into an optimized Code object, committing dependencies and
// emitting line-position log events. Returns a null handle if code
// generation fails. NOTE(review): the "return code" on the success path
// is elided in this view.
519 Handle<Code> LChunk::Codegen() {
520 MacroAssembler assembler(info()->isolate(), NULL, 0);
521 LOG_CODE_EVENT(info()->isolate(),
522 CodeStartLinePosInfoRecordEvent(
523 assembler.positions_recorder()));
524 // Code serializer only takes unoptimized code.
525 DCHECK(!info()->will_serialize());
526 LCodeGen generator(this, &assembler, info());
530 if (generator.GenerateCode()) {
531 generator.CheckEnvironmentUsage();
532 CodeGenerator::MakeCodePrologue(info(), "optimized");
533 Code::Flags flags = info()->flags();
// MakeCodeEpilogue materializes the Code object from the assembler
// (its Handle<Code> code = ... prefix is elided in this view).
535 CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
536 generator.FinishCode(code);
537 CommitDependencies(code);
538 code->set_is_crankshafted(true);
539 void* jit_handler_data =
540 assembler.positions_recorder()->DetachJITHandlerData();
541 LOG_CODE_EVENT(info()->isolate(),
542 CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
544 CodeGenerator::PrintCode(code, info());
545 DCHECK(!(info()->isolate()->serializer_enabled() &&
546 info()->GetMustNotHaveEagerFrame() &&
547 generator.NeedsEagerFrame()));
// Failure path: tell the assembler codegen was aborted and bail out.
550 assembler.AbortedCodeGeneration();
551 return Handle<Code>::null();
// Records the set of double registers the allocator assigned. When the
// function saves caller doubles, spill slots are reserved for each
// allocated double register (two pointer-sized slots when a double is
// twice the pointer size). NOTE(review): the iterator Advance() and the
// single-slot else-branch are elided in this view.
555 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
556 allocated_double_registers_ = allocated_registers;
557 BitVector* doubles = allocated_double_registers();
558 BitVector::Iterator iterator(doubles);
559 while (!iterator.Done()) {
560 if (info()->saves_caller_doubles()) {
561 if (kDoubleSize == kPointerSize * 2) {
562 spill_slot_count_ += 2;
// Aborts optimization of the current function with |reason|.
572 void LChunkBuilderBase::Abort(BailoutReason reason) {
573 info()->AbortOptimization(reason);
// Marks optimization for retry (rather than permanent abort) with |reason|.
578 void LChunkBuilderBase::Retry(BailoutReason reason) {
579 info()->RetryOptimization(reason);
// Recursively converts a hydrogen deopt environment (and its outer
// chain) into an LEnvironment. Values are added first with holes for
// captured/arguments objects, then the nested objects are materialized
// recursively via AddObjectToMaterialize. The argument index accumulator
// threads the count of pushed arguments through JS_FUNCTION frames.
// NOTE(review): several constructor-argument lines, the per-value
// AddValue call and branch-closing braces are elided in this view.
584 LEnvironment* LChunkBuilderBase::CreateEnvironment(
585 HEnvironment* hydrogen_env, int* argument_index_accumulator,
586 ZoneList<HValue*>* objects_to_materialize) {
587 if (hydrogen_env == NULL) return NULL;
// Build the outer environment first so nesting order is preserved.
589 LEnvironment* outer =
590 CreateEnvironment(hydrogen_env->outer(), argument_index_accumulator,
591 objects_to_materialize);
592 BailoutId ast_id = hydrogen_env->ast_id();
593 DCHECK(!ast_id.IsNone() ||
594 hydrogen_env->frame_type() != JS_FUNCTION);
// Specials (context etc.) are omitted except for JS_FUNCTION frames.
596 int omitted_count = (hydrogen_env->frame_type() == JS_FUNCTION)
598 : hydrogen_env->specials_count();
600 int value_count = hydrogen_env->length() - omitted_count;
601 LEnvironment* result =
602 new(zone()) LEnvironment(hydrogen_env->closure(),
603 hydrogen_env->frame_type(),
605 hydrogen_env->parameter_count(),
609 hydrogen_env->entry(),
611 int argument_index = *argument_index_accumulator;
613 // Store the environment description into the environment
614 // (with holes for nested objects)
615 for (int i = 0; i < hydrogen_env->length(); ++i) {
616 if (hydrogen_env->is_special_index(i) &&
617 hydrogen_env->frame_type() != JS_FUNCTION) {
621 HValue* value = hydrogen_env->values()->at(i);
622 CHECK(!value->IsPushArguments()); // Do not deopt outgoing arguments
623 if (value->IsArgumentsObject() || value->IsCapturedObject()) {
// Placeholder; the real object is expanded in the second pass below.
624 op = LEnvironment::materialization_marker();
629 value->representation(),
630 value->CheckFlag(HInstruction::kUint32));
633 // Recursively store the nested objects into the environment
634 for (int i = 0; i < hydrogen_env->length(); ++i) {
635 if (hydrogen_env->is_special_index(i)) continue;
637 HValue* value = hydrogen_env->values()->at(i);
638 if (value->IsArgumentsObject() || value->IsCapturedObject()) {
639 AddObjectToMaterialize(value, objects_to_materialize, result);
// Propagate the updated argument count back out for JS function frames.
643 if (hydrogen_env->frame_type() == JS_FUNCTION) {
644 *argument_index_accumulator = argument_index;
651 // Add an object to the supplied environment and object materialization list.
655 // We are building three lists here:
657 // 1. In the result->object_mapping_ list (added to by the
658 //    LEnvironment::Add*Object methods), we store the lengths (number
659 //    of fields) of the captured objects in depth-first traversal order, or
660 //    in case of duplicated objects, we store the index to the duplicate object
661 //    (with a tag to differentiate between captured and duplicated objects).
663 // 2. The object fields are stored in the result->values_ list
664 //    (added to by the LEnvironment.AddValue method) sequentially as lists
665 //    of fields with holes for nested objects (the holes will be expanded
666 //    later by LCodegen::AddToTranslation according to the
667 //    LEnvironment.object_mapping_ list).
669 // 3. The auxiliary objects_to_materialize array stores the hydrogen values
670 //    in the same order as result->object_mapping_ list. This is used
671 //    to detect duplicate values and calculate the corresponding object index.
// NOTE(review): the AddValue call per field and some closing braces are
// elided in this view.
672 void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
673 ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
674 int object_index = objects_to_materialize->length();
675 // Store the hydrogen value into the de-duplication array
676 objects_to_materialize->Add(value, zone());
677 // Find out whether we are storing a duplicated value
678 int previously_materialized_object = -1;
679 for (int prev = 0; prev < object_index; ++prev) {
680 if (objects_to_materialize->at(prev) == value) {
681 previously_materialized_object = prev;
685 // Store the captured object length (or duplicated object index)
686 // into the environment. For duplicated objects, we stop here.
687 int length = value->OperandCount();
688 bool is_arguments = value->IsArgumentsObject();
689 if (previously_materialized_object >= 0) {
690 result->AddDuplicateObject(previously_materialized_object);
// Arguments objects skip their first operand (the elements backing store
// is not a user-visible field), hence length - 1.
693 result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
695 // Store the captured object's fields into the environment
696 for (int i = is_arguments ? 1 : 0; i < length; ++i) {
698 HValue* arg_value = value->OperandAt(i);
699 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
700 // Insert a hole for nested objects
701 op = LEnvironment::materialization_marker();
703 DCHECK(!arg_value->IsPushArguments());
704 // For ordinary values, tell the register allocator we need the value
706 op = UseAny(arg_value);
709 arg_value->representation(),
710 arg_value->CheckFlag(HInstruction::kUint32));
712 // Recursively store all the nested captured objects into the environment
713 for (int i = is_arguments ? 1 : 0; i < length; ++i) {
714 HValue* arg_value = value->OperandAt(i);
715 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
716 AddObjectToMaterialize(arg_value, objects_to_materialize, result);
723 if (ShouldProduceTraceOutput()) {
724 isolate()->GetHTracer()->TraceLithium(name(), chunk_);
729 } } // namespace v8::internal