1 // Copyright 2013 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #include "src/compiler/code-generator.h"
7 #include "src/compiler/code-generator-impl.h"
8 #include "src/compiler/linkage.h"
9 #include "src/compiler/pipeline.h"
// NOTE(review): this listing embeds original file line numbers in each line
// and elides several lines of this class (access specifiers, the Label
// label_ member, and the closing brace are not visible here).
//
// Zone-allocated node in a singly-linked list of jump tables collected
// during assembly; each node carries the table's targets and a label that
// is bound at the point the table is emitted (see GenerateCode).
15 class CodeGenerator::JumpTable FINAL : public ZoneObject {
17   JumpTable(JumpTable* next, Label** targets, size_t target_count)
18       : next_(next), targets_(targets), target_count_(target_count) {}
// Label bound at the emitted table's position; jumps indirect through it.
20   Label* label() { return &label_; }
21   JumpTable* next() const { return next_; }
22   Label** targets() const { return targets_; }
23   size_t target_count() const { return target_count_; }
// Immutable list link and table payload (set once in the constructor).
27   JumpTable* const next_;
28   Label** const targets_;
29   size_t const target_count_;
// Constructor: wires the code generator to its frame, linkage, instruction
// sequence and compilation info, and allocates one Label per instruction
// block so branches can be bound later.
// NOTE(review): several member initializers are elided from this listing
// (e.g. frame_/linkage_/code_/info_ and resolver_/ools_/osr_pc_offset_ are
// not visible); the visible entries are assumed to be in declaration order.
33 CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
34                              InstructionSequence* code, CompilationInfo* info)
// One label per block, allocated in the codegen zone.
39       labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
40       current_block_(RpoNumber::Invalid()),
41       current_source_position_(SourcePosition::Invalid()),
// Macro-assembler with a growable internal buffer (NULL buffer, size 0).
42       masm_(info->isolate(), NULL, 0),
44       safepoints_(code->zone()),
45       handlers_(code->zone()),
46       deoptimization_states_(code->zone()),
47       deoptimization_literals_(code->zone()),
48       translations_(code->zone()),
49       last_lazy_deopt_pc_(0),
50       jump_tables_(nullptr),
// Placement-new each label: NewArray returns raw, unconstructed storage.
53   for (int i = 0; i < code->InstructionBlockCount(); ++i) {
54     new (&labels_[i]) Label;
// Main driver: assembles all instruction blocks into machine code, emits
// out-of-line code, jump tables, safepoint/handler/deopt metadata, and
// returns the finished Code object.
// NOTE(review): this listing elides lines (closing braces, some statements),
// so control-flow nesting below is partially reconstructed from context.
59 Handle<Code> CodeGenerator::GenerateCode() {
60   CompilationInfo* info = this->info();
62   // Emit a code line info recording start event.
63   PositionsRecorder* recorder = masm()->positions_recorder();
64   LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));
66   // Place function entry hook if requested to do so.
67   if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
68     ProfileEntryHookStub::MaybeCallEntryHook(masm());
71   // Architecture-specific, linkage-specific prologue.
72   info->set_prologue_offset(masm()->pc_offset());
75   // Assemble all non-deferred blocks, followed by deferred ones.
76   for (int deferred = 0; deferred < 2; ++deferred) {
77     for (auto const block : code()->instruction_blocks()) {
// On the first pass (deferred == 0) this matches deferred blocks and on the
// second pass non-deferred ones; the guarded statement (presumably a
// `continue` that skips the block) is elided from this listing — confirm.
78       if (block->IsDeferred() == (deferred == 0)) {
81       // Align loop headers on 16-byte boundaries.
82       if (block->IsLoopHeader()) masm()->Align(16);
83       // Bind a label for a block.
84       current_block_ = block->rpo_number();
85       if (FLAG_code_comments) {
86         // TODO(titzer): these code comments are a giant memory leak.
87         Vector<char> buffer = Vector<char>::New(32);
88         SNPrintF(buffer, "-- B%d start --", block->rpo_number().ToInt());
89         masm()->RecordComment(buffer.start());
91       masm()->bind(GetLabel(current_block_));
// Assemble every instruction in the block's [code_start, code_end) range.
92       for (int i = block->code_start(); i < block->code_end(); ++i) {
93         AssembleInstruction(code()->InstructionAt(i));
98   // Assemble all out-of-line code.
// ools_ is the head of the out-of-line code list built up by OutOfLineCode
// constructors during assembly above.
100     masm()->RecordComment("-- Out of line code --");
101     for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
102       masm()->bind(ool->entry());
// Jump back to the main code path if the ool code has a bound exit label.
104       if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
108   // Ensure there is space for lazy deoptimization in the code.
109   if (!info->IsStub()) {
110     int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
// Pad with nops until patch_size() bytes are available; the loop body
// (presumably a nop emission) is elided from this listing — confirm.
111     while (masm()->pc_offset() < target_offset) {
118   // Emit the jump tables.
120     masm()->Align(kPointerSize);
121     for (JumpTable* table = jump_tables_; table; table = table->next()) {
122       masm()->bind(table->label());
123       AssembleJumpTable(table->targets(), table->target_count());
// Emit the safepoint table after all code has been generated.
127   safepoints()->Emit(masm(), frame()->GetSpillSlotCount());
129   Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
130       masm(), info->flags(), info);
131   result->set_is_turbofanned(true);
132   result->set_stack_slots(frame()->GetSpillSlotCount());
133   result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
135   // Emit exception handler table.
136   if (!handlers_.empty()) {
137     Handle<HandlerTable> table =
138         Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
139             HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
// Record, per call site, the return pc offset and the handler's position.
141     for (size_t i = 0; i < handlers_.size(); ++i) {
142       table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
143       table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
145     result->set_handler_table(*table);
148   PopulateDeoptimizationData(result);
150   // Ensure there is space for lazy deoptimization in the relocation info.
151   if (!info->IsStub()) {
152     Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
155   // Emit a code line info recording stop event.
156   void* line_info = recorder->DetachJITHandlerData();
157   LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));
// Returns true if |block| immediately follows the current block in assembly
// order (ao_number), i.e. a jump to it can fall through.
163 bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
164   return code()->InstructionBlockAt(current_block_)->ao_number().IsNext(
165       code()->InstructionBlockAt(block)->ao_number());
// Defines a safepoint at the current pc and records every pointer-holding
// location from the pointer map: tagged stack slots always, registers only
// when the safepoint kind includes register state.
169 void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
171                                     Safepoint::DeoptMode deopt_mode) {
172   const ZoneList<InstructionOperand*>* operands =
173       pointers->GetNormalizedOperands();
174   Safepoint safepoint =
175       safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
176   for (int i = 0; i < operands->length(); i++) {
177     InstructionOperand* pointer = operands->at(i);
178     if (pointer->IsStackSlot()) {
// Tagged value on the stack: the GC must visit this slot.
179       safepoint.DefinePointerSlot(pointer->index(), zone());
180     } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
181       Register reg = Register::FromAllocationIndex(pointer->index());
182       safepoint.DefinePointerRegister(reg, zone());
// Assembles a single instruction: gap moves and source positions are
// handled specially; everything else goes through the architecture backend,
// followed by any flags continuation (branch or boolean materialization).
188 void CodeGenerator::AssembleInstruction(Instruction* instr) {
189   if (instr->IsGapMoves()) {
190     // Handle parallel moves associated with the gap instruction.
191     AssembleGap(GapInstruction::cast(instr));
192   } else if (instr->IsSourcePosition()) {
193     AssembleSourcePosition(SourcePositionInstruction::cast(instr));
195     // Assemble architecture-specific code for the instruction.
196     AssembleArchInstruction(instr);
// The flags continuation is encoded in the opcode's Flags fields.
198     FlagsMode mode = FlagsModeField::decode(instr->opcode());
199     FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
200     if (mode == kFlags_branch) {
201       // Assemble a branch after this instruction.
202       InstructionOperandConverter i(this, instr);
// By convention the branch targets are the last two inputs:
// true target at InputCount()-2, false target at InputCount()-1.
203       RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
204       RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);
// Both targets identical: the condition is irrelevant; emit an
// unconditional jump unless the target falls through.
206       if (true_rpo == false_rpo) {
208         if (!IsNextInAssemblyOrder(true_rpo)) {
209           AssembleArchJump(true_rpo);
213       if (IsNextInAssemblyOrder(true_rpo)) {
214         // true block is next, can fall through if condition negated.
215         std::swap(true_rpo, false_rpo);
216         condition = NegateFlagsCondition(condition);
// NOTE(review): the declaration of `branch` (a BranchInfo) is elided from
// this listing.
219       branch.condition = condition;
220       branch.true_label = GetLabel(true_rpo);
221       branch.false_label = GetLabel(false_rpo);
222       branch.fallthru = IsNextInAssemblyOrder(false_rpo);
223       // Assemble architecture-specific branch.
224       AssembleArchBranch(instr, &branch);
225     } else if (mode == kFlags_set) {
226       // Assemble a boolean materialization after this instruction.
227       AssembleArchBoolean(instr, condition);
// Records a change of source position with the positions recorder and,
// under --code-comments, annotates the code with "file:line:column".
// No-op if the position equals the one already recorded.
233 void CodeGenerator::AssembleSourcePosition(SourcePositionInstruction* instr) {
234   SourcePosition source_position = instr->source_position();
235   if (source_position == current_source_position_) return;
236   DCHECK(!source_position.IsInvalid());
237   if (!source_position.IsUnknown()) {
238     int code_pos = source_position.raw();
239     masm()->positions_recorder()->RecordPosition(source_position.raw());
240     masm()->positions_recorder()->WriteRecordedPositions();
241     if (FLAG_code_comments) {
242       Vector<char> buffer = Vector<char>::New(256);
243       CompilationInfo* info = this->info();
244       int ln = Script::GetLineNumber(info->script(), code_pos);
245       int cn = Script::GetColumnNumber(info->script(), code_pos);
246       if (info->script()->name()->IsString()) {
247         Handle<String> file(String::cast(info->script()->name()));
248         base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
249                            file->ToCString().get(), ln, cn);
// Script has no string name: fall back to a placeholder. NOTE(review): the
// `} else {` line between the two SNPrintF calls is elided from this listing.
251         base::OS::SNPrintF(buffer.start(), buffer.length(),
252                            "-- <unknown>:%d:%d --", ln, cn);
254       masm()->RecordComment(buffer.start());
// Remember the position so repeated instructions don't re-record it.
257   current_source_position_ = source_position;
// Resolves each parallel move attached to a gap instruction, iterating all
// inner positions (FIRST_INNER_POSITION..LAST_INNER_POSITION) in order.
261 void CodeGenerator::AssembleGap(GapInstruction* instr) {
262   for (int i = GapInstruction::FIRST_INNER_POSITION;
263        i <= GapInstruction::LAST_INNER_POSITION; i++) {
264     GapInstruction::InnerPosition inner_pos =
265         static_cast<GapInstruction::InnerPosition>(i);
266     ParallelMove* move = instr->GetParallelMove(inner_pos);
// A position may have no moves; only resolve when present.
267     if (move != NULL) resolver()->Resolve(move);
// Builds the DeoptimizationInputData for the generated code: translation
// byte array, literal array, OSR info, and one entry per recorded deopt
// state. Skipped entirely when there are no deopts and no OSR.
272 void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
273   CompilationInfo* info = this->info();
274   int deopt_count = static_cast<int>(deoptimization_states_.size());
275   if (deopt_count == 0 && !info->is_osr()) return;
276   Handle<DeoptimizationInputData> data =
277       DeoptimizationInputData::New(isolate(), deopt_count, TENURED);
279   Handle<ByteArray> translation_array =
280       translations_.CreateByteArray(isolate()->factory());
282   data->SetTranslationByteArray(*translation_array);
283   data->SetInlinedFunctionCount(Smi::FromInt(0));
284   data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
285   // TODO(jarin) The following code was copied over from Lithium, not sure
286   // whether the scope or the IsOptimizing condition are really needed.
287   if (info->IsOptimizing()) {
288     // Reference to shared function info does not change between phases.
289     AllowDeferredHandleDereference allow_handle_dereference;
290     data->SetSharedFunctionInfo(*info->shared_info());
// Non-optimizing compile: store a Smi placeholder instead. NOTE(review):
// the `} else {` line is elided from this listing.
292     data->SetSharedFunctionInfo(Smi::FromInt(0));
// Copy the collected deoptimization literals into a fixed array.
295   Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
296       static_cast<int>(deoptimization_literals_.size()), TENURED);
298     AllowDeferredHandleDereference copy_handles;
299     for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
300       literals->set(i, *deoptimization_literals_[i]);
302     data->SetLiteralArray(*literals);
305   if (info->is_osr()) {
306     DCHECK(osr_pc_offset_ >= 0);
307     data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
308     data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
// Not an OSR compile: record sentinel values (None ast id, pc -1).
310     BailoutId osr_ast_id = BailoutId::None();
311     data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
312     data->SetOsrPcOffset(Smi::FromInt(-1));
315   // Populate deoptimization entries.
316   for (int i = 0; i < deopt_count; i++) {
317     DeoptimizationState* deoptimization_state = deoptimization_states_[i];
318     data->SetAstId(i, deoptimization_state->bailout_id());
319     CHECK(deoptimization_states_[i]);
320     data->SetTranslationIndex(
321         i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
322     data->SetArgumentsStackHeight(i, Smi::FromInt(0));
323     data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
// Attach the finished data to the code object.
326   code_object->set_deoptimization_data(*data);
// Prepends a new jump table to the jump_tables_ list and returns its label;
// the table itself is emitted later in GenerateCode.
330 Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
331   jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
332   return jump_tables_->label();
// Records side tables for a call site: safepoint, optional exception
// handler entry, optional smi-inlining nop, and — when a frame state is
// attached — the lazy-deopt translation(s).
336 void CodeGenerator::RecordCallPosition(Instruction* instr) {
337   CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));
339   bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);
// NOTE(review): the RecordSafepoint( call line is elided from this listing;
// these are its trailing arguments.
342       instr->pointer_map(), Safepoint::kSimple, 0,
343       needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);
345   if (flags & CallDescriptor::kHasExceptionHandler) {
346     InstructionOperandConverter i(this, instr);
// The handler block is passed as the last input of the call instruction.
347     RpoNumber handler_rpo =
348         i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
349     handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
352   if (flags & CallDescriptor::kNeedsNopAfterCall) {
353     AddNopForSmiCodeInlining();
356   if (needs_frame_state) {
358     // If the frame state is present, it starts at argument 1 (just after the
// (continuation of the comment above is elided from this listing.)
360     size_t frame_state_offset = 1;
361     FrameStateDescriptor* descriptor =
362         GetFrameStateDescriptor(instr, frame_state_offset);
363     int pc_offset = masm()->pc_offset();
364     int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
365                                           descriptor->state_combine());
366     // If the pre-call frame state differs from the post-call one, produce the
367     // pre-call frame state, too.
368     // TODO(jarin) We might want to avoid building the pre-call frame state
369     // because it is only used to get locals and arguments (by the debugger and
370     // f.arguments), and those are the same in the pre-call and post-call
// (comment continues: "states.") — line elided from this listing.
372     if (!descriptor->state_combine().IsOutputIgnored()) {
// pc_offset of -1 marks the pre-call state (no associated return address).
373       deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
374                                         OutputFrameStateCombine::Ignore());
377     // Make sure all the values live in stack slots or they are immediates.
378     // (The values should not live in register because registers are clobbered
// (comment continues: "by calls.") — line elided from this listing.
380     for (size_t i = 0; i < descriptor->GetSize(); i++) {
381       InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
382       CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
// Associate the translation with the safepoint defined above.
385     safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
// Returns the index of |literal| in deoptimization_literals_, deduplicating
// by handle identity; appends it when not already present.
// NOTE(review): the trailing `return result;` is elided from this listing.
390 int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
391   int result = static_cast<int>(deoptimization_literals_.size());
392   for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
393     if (deoptimization_literals_[i].is_identical_to(literal)) return i;
395   deoptimization_literals_.push_back(literal);
// Reads the frame-state id from the instruction's input at
// |frame_state_offset| and looks up its descriptor in the sequence.
400 FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
401     Instruction* instr, size_t frame_state_offset) {
402   InstructionOperandConverter i(this, instr);
403   InstructionSequence::StateId state_id =
404       InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
405   return code()->GetFrameStateDescriptor(state_id);
// Small value pair tying an instruction operand to its machine type, used
// when emitting frame-state translations.
// NOTE(review): the `MachineType type_;` member line is elided from this
// listing.
408 struct OperandAndType {
409   OperandAndType(InstructionOperand* operand, MachineType type)
410       : operand_(operand), type_(type) {}
412   InstructionOperand* operand_;
// Selects the operand (and type) for frame-state slot |index|, taking the
// output-combine mode into account: slots covered by the instruction's
// outputs come from OutputAt(), everything else from the frame-state inputs.
// NOTE(review): several lines are elided here (the types passed for the
// outputs, `break` statements, and the enclosing-brace structure).
416 static OperandAndType TypedOperandForFrameState(
417     FrameStateDescriptor* descriptor, Instruction* instr,
418     size_t frame_state_offset, size_t index, OutputFrameStateCombine combine) {
419   DCHECK(index < descriptor->GetSize(combine));
420   switch (combine.kind()) {
421     case OutputFrameStateCombine::kPushOutput: {
422       DCHECK(combine.GetPushCount() <= instr->OutputCount());
423       size_t size_without_output =
424           descriptor->GetSize(OutputFrameStateCombine::Ignore());
425       // If the index is past the existing stack items, return the output.
426       if (index >= size_without_output) {
427         return OperandAndType(instr->OutputAt(index - size_without_output),
// kPokeAt: the output overwrites a window of slots counted from the top of
// the frame state.
432     case OutputFrameStateCombine::kPokeAt:
433       size_t index_from_top =
434           descriptor->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
435       if (index >= index_from_top &&
436           index < index_from_top + instr->OutputCount()) {
437         return OperandAndType(instr->OutputAt(index - index_from_top),
// Default: plain frame-state input with the descriptor's declared type.
442   return OperandAndType(instr->InputAt(frame_state_offset + index),
443                         descriptor->GetType(index));
// Recursively emits a Translation for a (possibly nested) frame state:
// outer frames first, then this frame's header (JS or arguments-adaptor)
// and its values.
447 void CodeGenerator::BuildTranslationForFrameStateDescriptor(
448     FrameStateDescriptor* descriptor, Instruction* instr,
449     Translation* translation, size_t frame_state_offset,
450     OutputFrameStateCombine state_combine) {
451   // Outer-most state must be added to translation first.
452   if (descriptor->outer_state() != NULL) {
453     BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), instr,
454                                             translation, frame_state_offset,
455                                             OutputFrameStateCombine::Ignore());
// The function literal for this frame; kSelfLiteralId means "the function
// being compiled itself".
458   int id = Translation::kSelfLiteralId;
459   if (!descriptor->jsfunction().is_null()) {
460     id = DefineDeoptimizationLiteral(
461         Handle<Object>::cast(descriptor->jsfunction().ToHandleChecked()));
// NOTE(review): the `case JS_FRAME:` label and `break` lines are elided
// from this listing.
464   switch (descriptor->type()) {
466       translation->BeginJSFrame(
467           descriptor->bailout_id(), id,
468           static_cast<unsigned int>(descriptor->GetSize(state_combine) -
469                                     descriptor->parameters_count()));
471     case ARGUMENTS_ADAPTOR:
472       translation->BeginArgumentsAdaptorFrame(
473           id, static_cast<unsigned int>(descriptor->parameters_count()));
// Inputs for this frame follow the outer frames' inputs.
// NOTE(review): this line presumably sits under an outer_state() != NULL
// guard whose brace is elided — GetTotalSize of a NULL outer state would
// otherwise dereference NULL; confirm against the full file.
477   frame_state_offset += descriptor->outer_state()->GetTotalSize();
478   for (size_t i = 0; i < descriptor->GetSize(state_combine); i++) {
479     OperandAndType op = TypedOperandForFrameState(
480         descriptor, instr, frame_state_offset, i, state_combine);
481     AddTranslationForOperand(translation, instr, op.operand_, op.type_);
// Builds a complete Translation for the instruction's frame state and
// records a DeoptimizationState for it; returns the new state's id.
486 int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
487                                     size_t frame_state_offset,
488                                     OutputFrameStateCombine state_combine) {
489   FrameStateDescriptor* descriptor =
490       GetFrameStateDescriptor(instr, frame_state_offset);
// Skip over the state-id input itself; the values start right after it.
491   frame_state_offset++;
493   Translation translation(
494       &translations_, static_cast<int>(descriptor->GetFrameCount()),
495       static_cast<int>(descriptor->GetJSFrameCount()), zone());
496   BuildTranslationForFrameStateDescriptor(descriptor, instr, &translation,
497                                           frame_state_offset, state_combine);
// Ids are assigned densely in push order.
499   int deoptimization_id = static_cast<int>(deoptimization_states_.size());
501   deoptimization_states_.push_back(new (zone()) DeoptimizationState(
502       descriptor->bailout_id(), translation.index(), pc_offset));
504   return deoptimization_id;
// Emits one translation entry describing where a frame-state value lives
// (stack slot, register, or literal) and how to materialize it, based on
// the operand kind and its MachineType.
// NOTE(review): several lines are elided here (the `MachineType type`
// parameter line, UNREACHABLE()/abort branches, `break` statements and a
// kFloat32 case are not visible).
508 void CodeGenerator::AddTranslationForOperand(Translation* translation,
510                                              InstructionOperand* op,
512   if (op->IsStackSlot()) {
513     // TODO(jarin) kMachBool and kRepBit should materialize true and false
514     // rather than creating an int value.
515     if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
516         type == kMachInt8 || type == kMachInt16) {
517       translation->StoreInt32StackSlot(op->index());
518     } else if (type == kMachUint32 || type == kMachUint16 ||
519                type == kMachUint8) {
520       translation->StoreUint32StackSlot(op->index());
521     } else if ((type & kRepMask) == kRepTagged) {
522       translation->StoreStackSlot(op->index());
526   } else if (op->IsDoubleStackSlot()) {
527     DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
528     translation->StoreDoubleStackSlot(op->index());
529   } else if (op->IsRegister()) {
530     InstructionOperandConverter converter(this, instr);
531     // TODO(jarin) kMachBool and kRepBit should materialize true and false
532     // rather than creating an int value.
533     if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
534         type == kMachInt8 || type == kMachInt16) {
535       translation->StoreInt32Register(converter.ToRegister(op));
536     } else if (type == kMachUint32 || type == kMachUint16 ||
537                type == kMachUint8) {
538       translation->StoreUint32Register(converter.ToRegister(op));
539     } else if ((type & kRepMask) == kRepTagged) {
540       translation->StoreRegister(converter.ToRegister(op));
544   } else if (op->IsDoubleRegister()) {
545     DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
546     InstructionOperandConverter converter(this, instr);
547     translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
548   } else if (op->IsImmediate()) {
// Immediates become deopt literals: convert the constant to a heap object
// and store its literal-array index.
549     InstructionOperandConverter converter(this, instr);
550     Constant constant = converter.ToConstant(op);
551     Handle<Object> constant_object;
552     switch (constant.type()) {
553       case Constant::kInt32:
554         DCHECK(type == kMachInt32 || type == kMachUint32 || type == kRepBit);
// NOTE(review): the `constant_object =` line is elided from this listing.
556             isolate()->factory()->NewNumberFromInt(constant.ToInt32());
558       case Constant::kFloat64:
559         DCHECK(type == kMachFloat64 || type == kMachAnyTagged ||
560                type == kRepTagged || type == (kTypeInt32 | kRepTagged) ||
561                type == (kTypeUint32 | kRepTagged));
562         constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
564       case Constant::kHeapObject:
565         DCHECK((type & kRepMask) == kRepTagged);
566         constant_object = constant.ToHeapObject();
571     int literal_id = DefineDeoptimizationLiteral(constant_object);
572     translation->StoreLiteral(literal_id);
// Remembers the current pc as the most recent lazy-deopt site.
579 void CodeGenerator::MarkLazyDeoptSite() {
580   last_lazy_deopt_pc_ = masm()->pc_offset();
// Placeholder implementations for ports that do not provide a TurboFan
// backend; each architecture normally supplies these in its own file.
// NOTE(review): the UNIMPLEMENTED() bodies of several stubs below are
// elided from this listing.
583 #if !V8_TURBOFAN_BACKEND
585 void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
590 void CodeGenerator::AssembleArchBranch(Instruction* instr,
591                                        BranchInfo* branch) {
596 void CodeGenerator::AssembleArchBoolean(Instruction* instr,
597                                         FlagsCondition condition) {
602 void CodeGenerator::AssembleArchJump(RpoNumber target) { UNIMPLEMENTED(); }
605 void CodeGenerator::AssembleDeoptimizerCall(
606     int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
611 void CodeGenerator::AssemblePrologue() { UNIMPLEMENTED(); }
614 void CodeGenerator::AssembleReturn() { UNIMPLEMENTED(); }
617 void CodeGenerator::AssembleMove(InstructionOperand* source,
618                                  InstructionOperand* destination) {
623 void CodeGenerator::AssembleSwap(InstructionOperand* source,
624                                  InstructionOperand* destination) {
629 void CodeGenerator::AddNopForSmiCodeInlining() { UNIMPLEMENTED(); }
632 void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
636 #endif  // !V8_TURBOFAN_BACKEND
// Links this out-of-line fragment onto the generator's ools_ list head so
// GenerateCode emits it after the main code. NOTE(review): the statement
// that writes the new head back (presumably gen->ools_ = this;) is elided
// from this listing.
639 OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
640     : masm_(gen->masm()), next_(gen->ools_) {
645 OutOfLineCode::~OutOfLineCode() {}
647 } // namespace compiler
648 } // namespace internal