1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #include "src/lithium.h"
8 #include "src/scopes.h"
9 #include "src/serialize.h"
11 #if V8_TARGET_ARCH_IA32
12 #include "src/ia32/lithium-ia32.h" // NOLINT
13 #include "src/ia32/lithium-codegen-ia32.h" // NOLINT
14 #elif V8_TARGET_ARCH_X64
15 #include "src/x64/lithium-x64.h" // NOLINT
16 #include "src/x64/lithium-codegen-x64.h" // NOLINT
17 #elif V8_TARGET_ARCH_ARM
18 #include "src/arm/lithium-arm.h" // NOLINT
19 #include "src/arm/lithium-codegen-arm.h" // NOLINT
20 #elif V8_TARGET_ARCH_MIPS
21 #include "src/mips/lithium-mips.h" // NOLINT
22 #include "src/mips/lithium-codegen-mips.h" // NOLINT
23 #elif V8_TARGET_ARCH_ARM64
24 #include "src/arm64/lithium-arm64.h" // NOLINT
25 #include "src/arm64/lithium-codegen-arm64.h" // NOLINT
26 #elif V8_TARGET_ARCH_MIPS64
27 #include "src/mips64/lithium-mips64.h" // NOLINT
28 #include "src/mips64/lithium-codegen-mips64.h" // NOLINT
29 #elif V8_TARGET_ARCH_X87
30 #include "src/x87/lithium-x87.h" // NOLINT
31 #include "src/x87/lithium-codegen-x87.h" // NOLINT
33 #error "Unknown architecture."
// Prints a human-readable description of this operand to |stream|,
// dispatching on the operand kind (unallocated, constant, stack slot,
// register, SIMD register/slot variants).
// NOTE(review): this dump elides many interior lines (breaks, closing
// braces, the outer switch header) — the visible text is preserved verbatim.
40 void LOperand::PrintTo(StringStream* stream) {
41 LUnallocated* unalloc = NULL;
47 unalloc = LUnallocated::cast(this);
48 stream->Add("v%d", unalloc->virtual_register());
49 if (unalloc->basic_policy() == LUnallocated::FIXED_SLOT) {
50 stream->Add("(=%dS)", unalloc->fixed_slot_index());
53 switch (unalloc->extended_policy()) {
54 case LUnallocated::NONE:
56 case LUnallocated::FIXED_REGISTER: {
57 int reg_index = unalloc->fixed_register_index();
// Guard against out-of-range allocation indices before mapping to a name.
59 reg_index >= Register::kMaxNumAllocatableRegisters) {
60 stream->Add("(=invalid_reg#%d)", reg_index);
62 const char* register_name =
63 Register::AllocationIndexToString(reg_index);
64 stream->Add("(=%s)", register_name);
68 case LUnallocated::FIXED_DOUBLE_REGISTER: {
69 int reg_index = unalloc->fixed_register_index();
71 reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
72 stream->Add("(=invalid_double_reg#%d)", reg_index);
74 const char* double_register_name =
75 DoubleRegister::AllocationIndexToString(reg_index);
76 stream->Add("(=%s)", double_register_name);
80 case LUnallocated::MUST_HAVE_REGISTER:
83 case LUnallocated::MUST_HAVE_DOUBLE_REGISTER:
86 case LUnallocated::WRITABLE_REGISTER:
89 case LUnallocated::SAME_AS_FIRST_INPUT:
92 case LUnallocated::ANY:
97 case CONSTANT_OPERAND:
98 stream->Add("[constant:%d]", index())\u003b
101 stream->Add("[stack:%d]", index());
103 case DOUBLE_STACK_SLOT:
104 stream->Add("[double_stack:%d]", index());
107 int reg_index = index();
108 if (reg_index < 0 || reg_index >= Register::kMaxNumAllocatableRegisters) {
109 stream->Add("(=invalid_reg#%d|R)", reg_index);
111 stream->Add("[%s|R]", Register::AllocationIndexToString(reg_index));
115 case FLOAT32x4_STACK_SLOT:
116 stream->Add("[float32x4_stack:%d]", index());
118 case FLOAT64x2_STACK_SLOT:
119 stream->Add("[float64x2_stack:%d]", index());
121 case INT32x4_STACK_SLOT:
122 stream->Add("[int32x4_stack:%d]", index());
124 case DOUBLE_REGISTER: {
125 int reg_index = index();
127 reg_index >= DoubleRegister::kMaxNumAllocatableRegisters) {
128 stream->Add("(=invalid_double_reg#%d|R)", reg_index);
130 stream->Add("[%s|R]",
131 DoubleRegister::AllocationIndexToString(reg_index));
// The three 128-bit SIMD kinds all share SIMD128Register naming.
135 case FLOAT32x4_REGISTER:
136 stream->Add("[%s|R]",
137 SIMD128Register::AllocationIndexToString(index()));
139 case FLOAT64x2_REGISTER:
140 stream->Add("[%s|R]",
141 SIMD128Register::AllocationIndexToString(index()));
143 case INT32x4_REGISTER:
144 stream->Add("[%s|R]",
145 SIMD128Register::AllocationIndexToString(index()));
// Out-of-line definition of the per-(kind, count) static cache of
// pre-built operands; initialized lazily by SetUpCache() below.
151 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
152 LSubKindOperand<kOperandKind, kNumCachedOperands>*
153 LSubKindOperand<kOperandKind, kNumCachedOperands>::cache = NULL;
// Allocates the static operand cache and pre-converts each entry to
// (kOperandKind, i) so common low-index operands need no allocation later.
156 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
157 void LSubKindOperand<kOperandKind, kNumCachedOperands>::SetUpCache() {
159 cache = new LSubKindOperand[kNumCachedOperands];
160 for (int i = 0; i < kNumCachedOperands; i++) {
161 cache[i].ConvertTo(kOperandKind, i);
// Releases the static operand cache (body elided in this dump;
// presumably delete[]s |cache| — confirm against the full file).
166 template<LOperand::Kind kOperandKind, int kNumCachedOperands>
167 void LSubKindOperand<kOperandKind, kNumCachedOperands>::TearDownCache() {
// Instantiates SetUpCache() for every operand kind in LITHIUM_OPERAND_LIST.
172 void LOperand::SetUpCaches() {
173 #define LITHIUM_OPERAND_SETUP(name, type, number) L##name::SetUpCache();
174 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_SETUP)
175 #undef LITHIUM_OPERAND_SETUP
// Mirror of SetUpCaches(): tears down every per-kind operand cache.
179 void LOperand::TearDownCaches() {
180 #define LITHIUM_OPERAND_TEARDOWN(name, type, number) L##name::TearDownCache();
181 LITHIUM_OPERAND_LIST(LITHIUM_OPERAND_TEARDOWN)
182 #undef LITHIUM_OPERAND_TEARDOWN
// A parallel move is redundant iff every contained move is redundant
// (the closing "return true" is elided from this dump).
186 bool LParallelMove::IsRedundant() const {
187 for (int i = 0; i < move_operands_.length(); ++i) {
188 if (!move_operands_[i].IsRedundant()) return false;
// Prints all non-eliminated moves, space-separated; a self-move prints
// only the destination, otherwise destination then source are printed.
194 void LParallelMove::PrintDataTo(StringStream* stream) const {
196 for (int i = 0; i < move_operands_.length(); ++i) {
197 if (!move_operands_[i].IsEliminated()) {
198 LOperand* source = move_operands_[i].source();
199 LOperand* destination = move_operands_[i].destination();
200 if (!first) stream->Add(" ");
202 if (source->Equals(destination)) {
203 destination->PrintTo(stream);
205 destination->PrintTo(stream);
207 source->PrintTo(stream);
// Dumps the deopt environment: ast id, optional deopt id, parameter and
// argument counts, then each value ("[hole]" for NULL slots).
215 void LEnvironment::PrintTo(StringStream* stream) {
216 stream->Add("[id=%d|", ast_id().ToInt());
217 if (deoptimization_index() != Safepoint::kNoDeoptimizationIndex) {
218 stream->Add("deopt_id=%d|", deoptimization_index());
220 stream->Add("parameters=%d|", parameter_count());
221 stream->Add("arguments_stack_height=%d|", arguments_stack_height());
222 for (int i = 0; i < values_.length(); ++i) {
223 if (i != 0) stream->Add(";");
224 if (values_[i] == NULL) {
225 stream->Add("[hole]");
227 values_[i]->PrintTo(stream);
// Records |op| as a tagged pointer for GC. Negative stack-slot indices
// are incoming arguments and are deliberately skipped; untagged
// (double/SIMD) operands are forbidden by the DCHECK.
234 void LPointerMap::RecordPointer(LOperand* op, Zone* zone) {
235 // Do not record arguments as pointers.
236 if (op->IsStackSlot() && op->index() < 0) return;
237 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot() &&
238 !op->IsFloat32x4Register() && !op->IsFloat32x4StackSlot() &&
239 !op->IsFloat64x2Register() && !op->IsFloat64x2StackSlot() &&
240 !op->IsInt32x4Register() && !op->IsInt32x4StackSlot());
241 pointer_operands_.Add(op, zone);
// Removes a previously recorded pointer operand; same argument/untagged
// filtering as RecordPointer. Linear scan — pointer maps are small.
245 void LPointerMap::RemovePointer(LOperand* op) {
246 // Do not record arguments as pointers.
247 if (op->IsStackSlot() && op->index() < 0) return;
248 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot() &&
249 !op->IsFloat32x4Register() && !op->IsFloat32x4StackSlot() &&
250 !op->IsFloat64x2Register() && !op->IsFloat64x2StackSlot() &&
251 !op->IsInt32x4Register() && !op->IsInt32x4StackSlot());
252 for (int i = 0; i < pointer_operands_.length(); ++i) {
253 if (pointer_operands_[i]->Equals(op)) {
254 pointer_operands_.Remove(i);
// Records |op| in the untagged list (values the GC must NOT treat as
// tagged pointers); same filtering and DCHECK as RecordPointer.
261 void LPointerMap::RecordUntagged(LOperand* op, Zone* zone) {
262 // Do not record arguments as pointers.
263 if (op->IsStackSlot() && op->index() < 0) return;
264 DCHECK(!op->IsDoubleRegister() && !op->IsDoubleStackSlot() &&
265 !op->IsFloat32x4Register() && !op->IsFloat32x4StackSlot() &&
266 !op->IsFloat64x2Register() && !op->IsFloat64x2StackSlot() &&
267 !op->IsInt32x4Register() && !op->IsInt32x4StackSlot());
268 untagged_operands_.Add(op, zone);
// Prints the recorded pointer operands, semicolon-separated.
272 void LPointerMap::PrintTo(StringStream* stream) {
274 for (int i = 0; i < pointer_operands_.length(); ++i) {
275 if (i != 0) stream->Add(";");
276 pointer_operands_[i]->PrintTo(stream);
// Translates a lithium stack-slot index into a frame-pointer-relative
// byte offset. Non-negative indices are locals/spills below the fixed
// frame; negative indices are incoming parameters above the saved
// FP/return address (the branch header lines are elided in this dump).
282 int StackSlotOffset(int index) {
284 // Local or spill slot. Skip the frame pointer, function, and
285 // context in the fixed part of the frame.
286 return -(index + 1) * kPointerSize -
287 StandardFrameConstants::kFixedFrameSizeFromFp;
289 // Incoming parameter. Skip the return address.
290 return -(index + 1) * kPointerSize + kFPOnStackSize + kPCOnStackSize;
// Constructs an empty chunk; zone-allocated lists get small initial
// capacities, and dependency sets share the chunk's zone allocator.
// NOTE(review): some initializer entries (likely info_/graph_) are
// elided in this dump.
295 LChunk::LChunk(CompilationInfo* info, HGraph* graph)
296 : spill_slot_count_(0),
299 instructions_(32, info->zone()),
300 pointer_maps_(8, info->zone()),
301 inlined_closures_(1, info->zone()),
302 deprecation_dependencies_(MapLess(), MapAllocator(info->zone())),
303 stability_dependencies_(MapLess(), MapAllocator(info->zone())) {}
// Returns the LLabel that starts the given basic block; the first
// instruction of every block is expected to be a label.
306 LLabel* LChunk::GetLabel(int block_id) const {
307 HBasicBlock* block = graph_->blocks()->at(block_id);
308 int first_instruction = block->first_instruction_index();
309 return LLabel::cast(instructions_[first_instruction]);
// Follows the label-replacement chain (set up by MarkEmptyBlocks) to
// find the block id that actually receives control.
313 int LChunk::LookupDestination(int block_id) const {
314 LLabel* cur = GetLabel(block_id);
315 while (cur->replacement() != NULL) {
316 cur = cur->replacement();
318 return cur->block_id();
// Returns the assembler Label for a block; the block must not have been
// replaced (callers must resolve via LookupDestination first).
321 Label* LChunk::GetAssemblyLabel(int block_id) const {
322 LLabel* label = GetLabel(block_id);
323 DCHECK(!label->HasReplacement());
324 return label->label();
// Marks blocks that contain only a redundant label, redundant gaps, and
// a final goto as eliminable by pointing their label at the goto's
// target. Loop headers are never eliminated.
// NOTE(review): several interior lines (else branches, closing braces)
// are elided in this dump; visible text is preserved verbatim.
328 void LChunk::MarkEmptyBlocks() {
329 LPhase phase("L_Mark empty blocks", this);
330 for (int i = 0; i < graph()->blocks()->length(); ++i) {
331 HBasicBlock* block = graph()->blocks()->at(i);
332 int first = block->first_instruction_index();
333 int last = block->last_instruction_index();
334 LInstruction* first_instr = instructions()->at(first);
335 LInstruction* last_instr = instructions()->at(last);
337 LLabel* label = LLabel::cast(first_instr);
338 if (last_instr->IsGoto()) {
339 LGoto* goto_instr = LGoto::cast(last_instr);
340 if (label->IsRedundant() &&
341 !label->is_loop_header()) {
342 bool can_eliminate = true;
// Inner scan shadows |i| deliberately: checks every instruction between
// the label and the goto; any non-redundant gap blocks elimination.
343 for (int i = first + 1; i < last && can_eliminate; ++i) {
344 LInstruction* cur = instructions()->at(i);
346 LGap* gap = LGap::cast(cur);
347 if (!gap->IsRedundant()) {
348 can_eliminate = false;
351 can_eliminate = false;
355 label->set_replacement(GetLabel(goto_instr->block_id()));
// Appends an instruction plus its companion gap. Control instructions
// get gap-before (so moves happen before the branch); ordinary
// instructions get gap-after. |index| records where the instruction
// landed so its pointer map can reference the lithium position.
363 void LChunk::AddInstruction(LInstruction* instr, HBasicBlock* block) {
364 LInstructionGap* gap = new (zone()) LInstructionGap(block);
365 gap->set_hydrogen_value(instr->hydrogen_value());
367 if (instr->IsControl()) {
368 instructions_.Add(gap, zone());
369 index = instructions_.length();
370 instructions_.Add(instr, zone());
372 index = instructions_.length();
373 instructions_.Add(instr, zone());
374 instructions_.Add(gap, zone());
376 if (instr->HasPointerMap()) {
377 pointer_maps_.Add(instr->pointer_map(), zone());
378 instr->pointer_map()->set_lithium_position(index);
// Creates (or fetches from the cache) the constant operand for the
// hydrogen constant's value id.
383 LConstantOperand* LChunk::DefineConstantOperand(HConstant* constant) {
384 return LConstantOperand::Create(constant->id(), zone());
// Maps a parameter index (receiver = 0) to its negative stack-slot
// index so parameters are distinguishable from spill slots.
388 int LChunk::GetParameterStackSlot(int index) const {
389 // The receiver is at index 0, the first parameter at index 1, so we
390 // shift all parameter indexes down by the number of parameters, and
391 // make sure they end up negative so they are distinguishable from
393 int result = index - info()->num_parameters() - 1;
400 // A parameter relative to ebp in the arguments stub.
401 int LChunk::ParameterAt(int index) {
402 DCHECK(-1 <= index); // -1 is the receiver.
// Offset in words from ebp; the multiplier (kPointerSize) is on an
// elided line in this dump.
403 return (1 + info()->scope()->num_parameters() - index) *
// Returns the instruction at |index| as a gap; caller must ensure it is
// one (see IsGapAt).
408 LGap* LChunk::GetGapAt(int index) const {
409 return LGap::cast(instructions_[index]);
// True if the instruction at |index| is a gap.
413 bool LChunk::IsGapAt(int index) const {
414 return instructions_[index]->IsGap();
// Scans backward from |index| to the closest gap position (the return
// of the decremented index is on an elided line).
418 int LChunk::NearestGapPos(int index) const {
419 while (!IsGapAt(index)) index--;
// Adds a from->to move to the START parallel move of the gap at |index|.
424 void LChunk::AddGapMove(int index, LOperand* from, LOperand* to) {
425 GetGapAt(index)->GetOrCreateParallelMove(
426 LGap::START, zone())->AddMove(from, to, zone());
// Resolves a constant operand back to its hydrogen HConstant.
430 HConstant* LChunk::LookupConstant(LConstantOperand* operand) const {
431 return HConstant::cast(graph_->LookupValue(operand->index()));
// Returns the representation of the hydrogen value behind a constant
// operand.
435 Representation LChunk::LookupLiteralRepresentation(
436 LConstantOperand* operand) const {
437 return graph_->LookupValue(operand->index())->representation();
// Registers the generated code as dependent on the collected map
// dependencies: transition-group for deprecation, prototype-check group
// for stability, then commits any CompilationInfo-level dependencies.
441 void LChunk::CommitDependencies(Handle<Code> code) const {
442 for (MapSet::const_iterator it = deprecation_dependencies_.begin(),
443 iend = deprecation_dependencies_.end(); it != iend; ++it) {
444 Handle<Map> map = *it;
445 DCHECK(!map->is_deprecated());
446 DCHECK(map->CanBeDeprecated());
447 Map::AddDependentCode(map, DependentCode::kTransitionGroup, code);
450 for (MapSet::const_iterator it = stability_dependencies_.begin(),
451 iend = stability_dependencies_.end(); it != iend; ++it) {
452 Handle<Map> map = *it;
453 DCHECK(map->is_stable());
454 DCHECK(map->CanTransition());
455 Map::AddDependentCode(map, DependentCode::kPrototypeCheckGroup, code);
458 info_->CommitDependencies(code);
// Builds and register-allocates a chunk for |graph|. Returns NULL on
// bailout (too many virtual registers, build failure, or allocation
// failure), recording the bailout reason on the CompilationInfo.
// Handle/heap allocation is disallowed for the whole pipeline run.
462 LChunk* LChunk::NewChunk(HGraph* graph) {
463 DisallowHandleAllocation no_handles;
464 DisallowHeapAllocation no_gc;
465 graph->DisallowAddingNewValues();
466 int values = graph->GetMaximumValueID();
467 CompilationInfo* info = graph->info();
468 if (values > LUnallocated::kMaxVirtualRegisters) {
469 info->set_bailout_reason(kNotEnoughVirtualRegistersForValues);
472 LAllocator allocator(values, graph);
473 LChunkBuilder builder(info, graph, &allocator);
474 LChunk* chunk = builder.Build();
475 if (chunk == NULL) return NULL;
477 if (!allocator.Allocate(chunk)) {
478 info->set_bailout_reason(kNotEnoughVirtualRegistersRegalloc);
482 chunk->set_allocated_double_registers(
483 allocator.assigned_double_registers());
// Runs Crankshaft code generation for this chunk: emits code via
// LCodeGen, commits dependencies, logs line-position info, and returns
// the finished Code handle; returns a null handle if generation fails.
489 Handle<Code> LChunk::Codegen() {
490 MacroAssembler assembler(info()->isolate(), NULL, 0);
491 LOG_CODE_EVENT(info()->isolate(),
492 CodeStartLinePosInfoRecordEvent(
493 assembler.positions_recorder()));
494 // TODO(yangguo) remove this once the code serializer handles code stubs.
495 if (info()->will_serialize()) assembler.enable_serializer();
496 LCodeGen generator(this, &assembler, info());
500 if (generator.GenerateCode()) {
501 generator.CheckEnvironmentUsage();
502 CodeGenerator::MakeCodePrologue(info(), "optimized");
503 Code::Flags flags = info()->flags();
505 CodeGenerator::MakeCodeEpilogue(&assembler, flags, info());
506 generator.FinishCode(code);
507 CommitDependencies(code);
508 code->set_is_crankshafted(true);
509 void* jit_handler_data =
510 assembler.positions_recorder()->DetachJITHandlerData();
511 LOG_CODE_EVENT(info()->isolate(),
512 CodeEndLinePosInfoRecordEvent(*code, jit_handler_data));
514 CodeGenerator::PrintCode(code, info());
// Serialized code must not require an eager frame.
515 DCHECK(!(info()->isolate()->serializer_enabled() &&
516 info()->GetMustNotHaveEagerFrame() &&
517 generator.NeedsEagerFrame()));
// Failure path: tell the assembler codegen was aborted and bail out.
520 assembler.AbortedCodeGeneration();
521 return Handle<Code>::null();
// Records which double registers the allocator assigned; when the frame
// saves caller doubles, reserves spill slots for each (two pointer-sized
// slots per double on 32-bit targets).
525 void LChunk::set_allocated_double_registers(BitVector* allocated_registers) {
526 allocated_double_registers_ = allocated_registers;
527 BitVector* doubles = allocated_double_registers();
528 BitVector::Iterator iterator(doubles);
529 while (!iterator.Done()) {
530 if (info()->saves_caller_doubles()) {
531 if (kDoubleSize == kPointerSize * 2) {
532 spill_slot_count_ += 2;
// Recursively builds the LEnvironment chain mirroring the hydrogen
// environment chain (outermost first). Values are added in two passes:
// first all slots (materialization markers stand in for captured
// objects), then the captured objects themselves are expanded
// recursively via AddObjectToMaterialize.
542 LEnvironment* LChunkBuilderBase::CreateEnvironment(
543 HEnvironment* hydrogen_env,
544 int* argument_index_accumulator,
545 ZoneList<HValue*>* objects_to_materialize) {
546 if (hydrogen_env == NULL) return NULL;
548 LEnvironment* outer = CreateEnvironment(hydrogen_env->outer(),
549 argument_index_accumulator,
550 objects_to_materialize);
551 BailoutId ast_id = hydrogen_env->ast_id();
552 DCHECK(!ast_id.IsNone() ||
553 hydrogen_env->frame_type() != JS_FUNCTION);
554 int value_count = hydrogen_env->length() - hydrogen_env->specials_count();
555 LEnvironment* result =
556 new(zone()) LEnvironment(hydrogen_env->closure(),
557 hydrogen_env->frame_type(),
559 hydrogen_env->parameter_count(),
563 hydrogen_env->entry(),
565 int argument_index = *argument_index_accumulator;
567 // Store the environment description into the environment
568 // (with holes for nested objects)
569 for (int i = 0; i < hydrogen_env->length(); ++i) {
570 if (hydrogen_env->is_special_index(i)) continue;
573 HValue* value = hydrogen_env->values()->at(i);
574 CHECK(!value->IsPushArguments()); // Do not deopt outgoing arguments
575 if (value->IsArgumentsObject() || value->IsCapturedObject()) {
576 op = LEnvironment::materialization_marker();
581 value->representation(),
582 value->CheckFlag(HInstruction::kUint32));
585 // Recursively store the nested objects into the environment
586 for (int i = 0; i < hydrogen_env->length(); ++i) {
587 if (hydrogen_env->is_special_index(i)) continue;
589 HValue* value = hydrogen_env->values()->at(i);
590 if (value->IsArgumentsObject() || value->IsCapturedObject()) {
591 AddObjectToMaterialize(value, objects_to_materialize, result);
// Only a real JS frame publishes the updated argument index to callers.
595 if (hydrogen_env->frame_type() == JS_FUNCTION) {
596 *argument_index_accumulator = argument_index;
603 // Add an object to the supplied environment and object materialization list.
607 // We are building three lists here:
609 // 1. In the result->object_mapping_ list (added to by the
610 // LEnvironment::Add*Object methods), we store the lengths (number
611 // of fields) of the captured objects in depth-first traversal order, or
612 // in case of duplicated objects, we store the index to the duplicate object
613 // (with a tag to differentiate between captured and duplicated objects).
615 // 2. The object fields are stored in the result->values_ list
616 // (added to by the LEnvironment.AddValue method) sequentially as lists
617 // of fields with holes for nested objects (the holes will be expanded
618 // later by LCodegen::AddToTranslation according to the
619 // LEnvironment.object_mapping_ list).
621 // 3. The auxiliary objects_to_materialize array stores the hydrogen values
622 // in the same order as result->object_mapping_ list. This is used
623 // to detect duplicate values and calculate the corresponding object index.
// Appends a captured/arguments object (and, recursively, its nested
// captured objects) to |result| and the de-duplication list; see the
// three-list description in the comment block above this function.
624 void LChunkBuilderBase::AddObjectToMaterialize(HValue* value,
625 ZoneList<HValue*>* objects_to_materialize, LEnvironment* result) {
626 int object_index = objects_to_materialize->length();
627 // Store the hydrogen value into the de-duplication array
628 objects_to_materialize->Add(value, zone());
629 // Find out whether we are storing a duplicated value
630 int previously_materialized_object = -1;
631 for (int prev = 0; prev < object_index; ++prev) {
632 if (objects_to_materialize->at(prev) == value) {
633 previously_materialized_object = prev;
637 // Store the captured object length (or duplicated object index)
638 // into the environment. For duplicated objects, we stop here.
639 int length = value->OperandCount();
640 bool is_arguments = value->IsArgumentsObject();
641 if (previously_materialized_object >= 0) {
642 result->AddDuplicateObject(previously_materialized_object);
// Arguments objects skip operand 0 (presumably the elements/length
// special operand — confirm against HArgumentsObject).
645 result->AddNewObject(is_arguments ? length - 1 : length, is_arguments);
647 // Store the captured object's fields into the environment
648 for (int i = is_arguments ? 1 : 0; i < length; ++i) {
650 HValue* arg_value = value->OperandAt(i);
651 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
652 // Insert a hole for nested objects
653 op = LEnvironment::materialization_marker();
655 DCHECK(!arg_value->IsPushArguments());
656 // For ordinary values, tell the register allocator we need the value
658 op = UseAny(arg_value);
661 arg_value->representation(),
662 arg_value->CheckFlag(HInstruction::kUint32));
664 // Recursively store all the nested captured objects into the environment
665 for (int i = is_arguments ? 1 : 0; i < length; ++i) {
666 HValue* arg_value = value->OperandAt(i);
667 if (arg_value->IsArgumentsObject() || arg_value->IsCapturedObject()) {
668 AddObjectToMaterialize(arg_value, objects_to_materialize, result);
// NOTE(review): fragment of a member function whose signature is elided
// in this dump (presumably LPhase's destructor, which traces the chunk
// when tracing is enabled — confirm against the full file).
675 if (ShouldProduceTraceOutput()) {
676 isolate()->GetHTracer()->TraceLithium(name(), chunk_);
681 } } // namespace v8::internal