// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"

namespace v8 {
namespace internal {
namespace compiler {
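// A singly-linked list node describing one jump table that will be emitted
// after the instruction stream; see AssembleJumpTable().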
class CodeGenerator::JumpTable FINAL : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_(frame),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(BasicBlock::RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Invalid()),
      masm_(info->isolate(), NULL, 0),
      resolver_(this),
      safepoints_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
}
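// Assembles the complete code object: prologue, all instruction blocks
// (deferred blocks last), out-of-line code, lazy-deopt padding and jump
// tables, then emits the safepoint table and deoptimization data.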
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }

  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());
  AssemblePrologue();
  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (auto const block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        // Skip deferred blocks on the first pass, non-deferred on the second.
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(32);
        SNPrintF(buffer, "-- B%d start --", block->id().ToInt());
        masm()->RecordComment(buffer.start());
      }
      masm()->bind(GetLabel(current_block_));
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        AssembleInstruction(code()->InstructionAt(i));
      }
    }
  }
  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (!info->IsStub()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }
  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetSpillSlotCount());

  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), info->flags(), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetSpillSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (!info->IsStub()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));

  return result;
}
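// Returns true if {block} immediately follows the current block in assembly
// order, so control can simply fall through to it.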
bool CodeGenerator::IsNextInAssemblyOrder(BasicBlock::RpoNumber block) const {
  return code()->InstructionBlockAt(current_block_)->ao_number().IsNext(
      code()->InstructionBlockAt(block)->ao_number());
}
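// Records a safepoint, marking every pointer that is live in a stack slot
// (and, for kWithRegisters safepoints, in a register).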
void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
                                    int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  const ZoneList<InstructionOperand*>* operands =
      pointers->GetNormalizedOperands();
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    InstructionOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = Register::FromAllocationIndex(pointer->index());
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}
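// Assembles a single instruction: gap moves and source positions are handled
// here, everything else is delegated to the architecture-specific backend,
// followed by any branch or boolean materialization the flags mode requires.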
void CodeGenerator::AssembleInstruction(Instruction* instr) {
  if (instr->IsGapMoves()) {
    // Handle parallel moves associated with the gap instruction.
    AssembleGap(GapInstruction::cast(instr));
  } else if (instr->IsSourcePosition()) {
    AssembleSourcePosition(SourcePositionInstruction::cast(instr));
  } else {
    // Assemble architecture-specific code for the instruction.
    AssembleArchInstruction(instr);

    FlagsMode mode = FlagsModeField::decode(instr->opcode());
    FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
    if (mode == kFlags_branch) {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      BasicBlock::RpoNumber true_rpo =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
      BasicBlock::RpoNumber false_rpo =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
      if (true_rpo == false_rpo) {
        // The branch targets are identical; an unconditional jump suffices.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // true block is next, can fall through if condition negated.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
    } else if (mode == kFlags_set) {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
    }
  }
}
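// Records the source position of {instr} with the assembler and, when
// --code-comments is enabled, emits a "file:line:column" comment.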
void CodeGenerator::AssembleSourcePosition(SourcePositionInstruction* instr) {
  SourcePosition source_position = instr->source_position();
  if (source_position == current_source_position_) return;
  DCHECK(!source_position.IsInvalid());
  if (!source_position.IsUnknown()) {
    int code_pos = source_position.raw();
    masm()->positions_recorder()->RecordPosition(source_position.raw());
    masm()->positions_recorder()->WriteRecordedPositions();
    if (FLAG_code_comments) {
      Vector<char> buffer = Vector<char>::New(256);
      CompilationInfo* info = this->info();
      int ln = Script::GetLineNumber(info->script(), code_pos);
      int cn = Script::GetColumnNumber(info->script(), code_pos);
      if (info->script()->name()->IsString()) {
        Handle<String> file(String::cast(info->script()->name()));
        base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                           file->ToCString().get(), ln, cn);
      } else {
        base::OS::SNPrintF(buffer.start(), buffer.length(),
                           "-- <unknown>:%d:%d --", ln, cn);
      }
      masm()->RecordComment(buffer.start());
    }
  }
  current_source_position_ = source_position;
}
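// Resolves and emits the parallel moves recorded at each inner position of
// the gap instruction.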
void CodeGenerator::AssembleGap(GapInstruction* instr) {
  for (int i = GapInstruction::FIRST_INNER_POSITION;
       i <= GapInstruction::LAST_INNER_POSITION; i++) {
    GapInstruction::InnerPosition inner_pos =
        static_cast<GapInstruction::InnerPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != NULL) resolver()->Resolve(move);
  }
}
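// Builds the DeoptimizationInputData for the code object from the recorded
// translations, literals and deoptimization states.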
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(Smi::FromInt(0));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
  // TODO(jarin) The following code was copied over from Lithium, not sure
  // whether the scope or the IsOptimizing condition are really needed.
  if (info->IsOptimizing()) {
    // Reference to shared function info does not change between phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}
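// Registers a jump table and returns the label that will be bound to it when
// the tables are emitted at the end of code generation.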
Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}
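// Records the safepoint for a call instruction and, if the call carries a
// frame state, builds the corresponding deoptimization translation(s).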
void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->pointer_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kNeedsNopAfterCall) {
    AddNopForSmiCodeInlining();
  }

  if (needs_frame_state) {
    // If the frame state is present, it starts at argument 1
    // (just after the code address).
    InstructionOperandConverter converter(this, instr);
    // Deoptimization info starts at argument 1
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
    // Make sure all the values live in stack slots or they are immediates.
    // (The values should not live in registers because registers are
    // clobbered by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
    }

    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}
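// Returns the index of {literal} in the deoptimization literal array, adding
// it if it has not been seen before.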
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}
FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id = InstructionSequence::StateId::FromInt(
      i.InputInt32(static_cast<int>(frame_state_offset)));
  return code()->GetFrameStateDescriptor(state_id);
}
struct OperandAndType {
  OperandAndType(InstructionOperand* operand, MachineType type)
      : operand_(operand), type_(type) {}

  InstructionOperand* operand_;
  MachineType type_;
};
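// Returns the operand and machine type for frame-state value {index}; values
// that are produced by the instruction itself (when the frame state is
// combined with the call's output) are read from the instruction's outputs.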
static OperandAndType TypedOperandForFrameState(
    FrameStateDescriptor* descriptor, Instruction* instr,
    size_t frame_state_offset, size_t index, OutputFrameStateCombine combine) {
  DCHECK(index < descriptor->GetSize(combine));
  switch (combine.kind()) {
    case OutputFrameStateCombine::kPushOutput: {
      DCHECK(combine.GetPushCount() <= instr->OutputCount());
      size_t size_without_output =
          descriptor->GetSize(OutputFrameStateCombine::Ignore());
      // If the index is past the existing stack items, return the output.
      if (index >= size_without_output) {
        return OperandAndType(instr->OutputAt(index - size_without_output),
                              kMachAnyTagged);
      }
      break;
    }
    case OutputFrameStateCombine::kPokeAt:
      size_t index_from_top =
          descriptor->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
      if (index >= index_from_top &&
          index < index_from_top + instr->OutputCount()) {
        return OperandAndType(instr->OutputAt(index - index_from_top),
                              kMachAnyTagged);
      }
      break;
  }
  return OperandAndType(instr->InputAt(frame_state_offset + index),
                        descriptor->GetType(index));
}
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, Instruction* instr,
    Translation* translation, size_t frame_state_offset,
    OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != NULL) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), instr,
                                            translation, frame_state_offset,
                                            OutputFrameStateCombine::Ignore());
  }

  int id = Translation::kSelfLiteralId;
  if (!descriptor->jsfunction().is_null()) {
    id = DefineDeoptimizationLiteral(
        Handle<Object>::cast(descriptor->jsfunction().ToHandleChecked()));
  }

  switch (descriptor->type()) {
    case JS_FRAME:
      translation->BeginJSFrame(
          descriptor->bailout_id(), id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    descriptor->parameters_count()));
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(
          id, static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  frame_state_offset += descriptor->outer_state()->GetTotalSize();
  for (size_t i = 0; i < descriptor->GetSize(state_combine); i++) {
    OperandAndType op = TypedOperandForFrameState(
        descriptor, instr, frame_state_offset, i, state_combine);
    AddTranslationForOperand(translation, instr, op.operand_, op.type_);
  }
}
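// Builds a Translation for the frame state at {frame_state_offset}, records a
// DeoptimizationState for it and returns the deoptimization id.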
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  BuildTranslationForFrameStateDescriptor(descriptor, instr, &translation,
                                          frame_state_offset, state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}
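// Adds the translation entry for a single frame-state value, depending on
// whether it lives in a stack slot, a register or is an immediate constant.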
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type == kMachBool || type == kMachInt32 || type == kMachInt8 ||
        type == kMachInt16) {
      translation->StoreInt32StackSlot(op->index());
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32StackSlot(op->index());
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreStackSlot(op->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleStackSlot()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type == kMachBool || type == kMachInt32 || type == kMachInt8 ||
        type == kMachInt16) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleRegister()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    InstructionOperandConverter converter(this, instr);
    translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == kMachInt32 || type == kMachUint32);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat64:
        DCHECK(type == kMachFloat64 || type == kMachAnyTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK((type & kRepMask) == kRepTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    int literal_id = DefineDeoptimizationLiteral(constant_object);
    translation->StoreLiteral(literal_id);
  } else {
    UNREACHABLE();
  }
}
void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
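// Stub implementations for ports that do not provide a TurboFan backend.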
#if !V8_TURBOFAN_BACKEND
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssembleArchBranch(Instruction* instr,
                                       BranchInfo* branch) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssemblePrologue() { UNIMPLEMENTED(); }
void CodeGenerator::AssembleReturn() { UNIMPLEMENTED(); }
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}
void CodeGenerator::AddNopForSmiCodeInlining() { UNIMPLEMENTED(); }
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  UNIMPLEMENTED();
}
#endif  // !V8_TURBOFAN_BACKEND
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}

OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8