deps: update v8 to 4.3.61.21
deps/v8/src/compiler/code-generator.cc
// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"

namespace v8 {
namespace internal {
namespace compiler {

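// A table of jump targets emitted at the end of the generated code. Tables
// are chained into a singly linked list so that they can all be emitted
// together once the instruction stream is complete.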
class CodeGenerator::JumpTable FINAL : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};


CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_(frame),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Invalid()),
      masm_(info->isolate(), NULL, 0),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
}

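// Generates the final code object: assembles the prologue, all instruction
// blocks (deferred blocks last), out-of-line code and jump tables, then
// attaches safepoint, handler and deoptimization data to the result.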
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }

  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());
  AssemblePrologue();

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (auto const block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(32);
        SNPrintF(buffer, "-- B%d start --", block->rpo_number().ToInt());
        masm()->RecordComment(buffer.start());
      }
      masm()->bind(GetLabel(current_block_));
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        AssembleInstruction(code()->InstructionAt(i));
      }
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (!info->IsStub()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetSpillSlotCount());

  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), info->flags(), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetSpillSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (!info->IsStub()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));

  return result;
}

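// Returns true if {block} is immediately after the current block in assembly
// order, i.e. a branch to it can fall through.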
bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return code()->InstructionBlockAt(current_block_)->ao_number().IsNext(
      code()->InstructionBlockAt(block)->ao_number());
}

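// Records a safepoint for the given pointer map: every tracked stack slot,
// and (for kWithRegisters safepoints) every tracked register, is registered
// with the safepoint table.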
void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
                                    int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  const ZoneList<InstructionOperand*>* operands =
      pointers->GetNormalizedOperands();
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    InstructionOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = Register::FromAllocationIndex(pointer->index());
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}

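// Assembles a single instruction: gap moves and source positions are handled
// directly, everything else is dispatched to the architecture backend, with
// any branch or boolean materialization implied by the flags mode assembled
// immediately afterwards.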
void CodeGenerator::AssembleInstruction(Instruction* instr) {
  if (instr->IsGapMoves()) {
    // Handle parallel moves associated with the gap instruction.
    AssembleGap(GapInstruction::cast(instr));
  } else if (instr->IsSourcePosition()) {
    AssembleSourcePosition(SourcePositionInstruction::cast(instr));
  } else {
    // Assemble architecture-specific code for the instruction.
    AssembleArchInstruction(instr);

    FlagsMode mode = FlagsModeField::decode(instr->opcode());
    FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
    if (mode == kFlags_branch) {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

      if (true_rpo == false_rpo) {
        // Redundant branch: both targets agree, so an unconditional jump
        // (or fallthrough) suffices.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // The true block is next in assembly order; swap the targets and
        // negate the condition so that the true case falls through.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
    } else if (mode == kFlags_set) {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
    }
  }
}

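// Records a new source position with the assembler's positions recorder and,
// if --code-comments is enabled, emits a "file:line:column" comment.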
void CodeGenerator::AssembleSourcePosition(SourcePositionInstruction* instr) {
  SourcePosition source_position = instr->source_position();
  if (source_position == current_source_position_) return;
  DCHECK(!source_position.IsInvalid());
  if (!source_position.IsUnknown()) {
    int code_pos = source_position.raw();
    masm()->positions_recorder()->RecordPosition(code_pos);
    masm()->positions_recorder()->WriteRecordedPositions();
    if (FLAG_code_comments) {
      Vector<char> buffer = Vector<char>::New(256);
      CompilationInfo* info = this->info();
      int ln = Script::GetLineNumber(info->script(), code_pos);
      int cn = Script::GetColumnNumber(info->script(), code_pos);
      if (info->script()->name()->IsString()) {
        Handle<String> file(String::cast(info->script()->name()));
        base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                           file->ToCString().get(), ln, cn);
      } else {
        base::OS::SNPrintF(buffer.start(), buffer.length(),
                           "-- <unknown>:%d:%d --", ln, cn);
      }
      masm()->RecordComment(buffer.start());
    }
  }
  current_source_position_ = source_position;
}

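// Resolves and assembles the parallel moves stored at each inner position of
// the gap instruction.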
void CodeGenerator::AssembleGap(GapInstruction* instr) {
  for (int i = GapInstruction::FIRST_INNER_POSITION;
       i <= GapInstruction::LAST_INNER_POSITION; i++) {
    GapInstruction::InnerPosition inner_pos =
        static_cast<GapInstruction::InnerPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != NULL) resolver()->Resolve(move);
  }
}

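// Builds the DeoptimizationInputData for the code object: the translation
// byte array, the literal array, OSR information and one entry per recorded
// deoptimization state.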
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(Smi::FromInt(0));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
  // TODO(jarin) The following code was copied over from Lithium, not sure
  // whether the scope or the IsOptimizing condition are really needed.
  if (info->IsOptimizing()) {
    // Reference to shared function info does not change between phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    CHECK(deoptimization_state);
    data->SetAstId(i, deoptimization_state->bailout_id());
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_state->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}

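// Allocates a new jump table, prepends it to the list of pending tables and
// returns the label that marks the start of the emitted table.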
Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}

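// Records the bookkeeping required at a call site: the safepoint, an
// exception handler entry if the call can throw, a nop for Smi code inlining
// if requested, and the frame state translations needed for lazy deopt.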
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->pointer_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    RpoNumber handler_rpo =
        i.InputRpo(static_cast<int>(instr->InputCount()) - 1);
    handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (flags & CallDescriptor::kNeedsNopAfterCall) {
    AddNopForSmiCodeInlining();
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or are immediates.
    // (The values should not live in registers because registers are
    // clobbered by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}

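// Returns the index of {literal} in the deoptimization literal array, adding
// it at the end if it is not already present.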
int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}

FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id =
      InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
  return code()->GetFrameStateDescriptor(state_id);
}

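// A pairing of an instruction operand with the machine type of the value it
// holds, as used when translating frame state values.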
struct OperandAndType {
  OperandAndType(InstructionOperand* operand, MachineType type)
      : operand_(operand), type_(type) {}

  InstructionOperand* operand_;
  MachineType type_;
};

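// Returns the operand (and its type) holding frame state value {index},
// taking the output combine into account: values that the instruction pushes
// or pokes into the frame state are read from the instruction's outputs
// rather than from its frame state inputs.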
static OperandAndType TypedOperandForFrameState(
    FrameStateDescriptor* descriptor, Instruction* instr,
    size_t frame_state_offset, size_t index, OutputFrameStateCombine combine) {
  DCHECK(index < descriptor->GetSize(combine));
  switch (combine.kind()) {
    case OutputFrameStateCombine::kPushOutput: {
      DCHECK(combine.GetPushCount() <= instr->OutputCount());
      size_t size_without_output =
          descriptor->GetSize(OutputFrameStateCombine::Ignore());
      // If the index is past the existing stack items, return the output.
      if (index >= size_without_output) {
        return OperandAndType(instr->OutputAt(index - size_without_output),
                              kMachAnyTagged);
      }
      break;
    }
    case OutputFrameStateCombine::kPokeAt: {
      size_t index_from_top =
          descriptor->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
      if (index >= index_from_top &&
          index < index_from_top + instr->OutputCount()) {
        return OperandAndType(instr->OutputAt(index - index_from_top),
                              kMachAnyTagged);
      }
      break;
    }
  }
  return OperandAndType(instr->InputAt(frame_state_offset + index),
                        descriptor->GetType(index));
}

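// Recursively adds the frame states to the translation, outermost frame
// first, then emits the frame header and each frame state value for the
// current descriptor.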
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, Instruction* instr,
    Translation* translation, size_t frame_state_offset,
    OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != NULL) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), instr,
                                            translation, frame_state_offset,
                                            OutputFrameStateCombine::Ignore());
  }

  int id = Translation::kSelfLiteralId;
  if (!descriptor->jsfunction().is_null()) {
    id = DefineDeoptimizationLiteral(
        Handle<Object>::cast(descriptor->jsfunction().ToHandleChecked()));
  }

  switch (descriptor->type()) {
    case JS_FRAME:
      translation->BeginJSFrame(
          descriptor->bailout_id(), id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    descriptor->parameters_count()));
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(
          id, static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  // Skip over the inputs consumed by any outer frame states; there are none
  // for the outermost frame.
  if (descriptor->outer_state() != NULL) {
    frame_state_offset += descriptor->outer_state()->GetTotalSize();
  }
  for (size_t i = 0; i < descriptor->GetSize(state_combine); i++) {
    OperandAndType op = TypedOperandForFrameState(
        descriptor, instr, frame_state_offset, i, state_combine);
    AddTranslationForOperand(translation, instr, op.operand_, op.type_);
  }
}

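// Builds a full translation for the frame state found at
// {frame_state_offset} in {instr}'s inputs, records a new deoptimization
// state for it and returns that state's id.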
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  BuildTranslationForFrameStateDescriptor(descriptor, instr, &translation,
                                          frame_state_offset, state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}

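// Emits a single frame state value into the translation, dispatching on the
// operand's location (stack slot, register, their double variants, or
// immediate) and on its machine type.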
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    // TODO(jarin) kMachBool and kRepBit should materialize true and false
    // rather than creating an int value.
    if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
        type == kMachInt8 || type == kMachInt16) {
      translation->StoreInt32StackSlot(op->index());
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32StackSlot(op->index());
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreStackSlot(op->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleStackSlot()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    // TODO(jarin) kMachBool and kRepBit should materialize true and false
    // rather than creating an int value.
    if (type == kMachBool || type == kRepBit || type == kMachInt32 ||
        type == kMachInt8 || type == kMachInt16) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleRegister()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    InstructionOperandConverter converter(this, instr);
    translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == kMachInt32 || type == kMachUint32 || type == kRepBit);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat64:
        DCHECK(type == kMachFloat64 || type == kMachAnyTagged ||
               type == kRepTagged || type == (kTypeInt32 | kRepTagged) ||
               type == (kTypeUint32 | kRepTagged));
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK((type & kRepMask) == kRepTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    int literal_id = DefineDeoptimizationLiteral(constant_object);
    translation->StoreLiteral(literal_id);
  } else {
    CHECK(false);
  }
}

void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}

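// For ports that do not yet implement a TurboFan backend, provide stub
// implementations of the architecture-specific entry points; reaching any of
// them is a fatal error.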
#if !V8_TURBOFAN_BACKEND

void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchBranch(Instruction* instr,
                                       BranchInfo* branch) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchJump(RpoNumber target) { UNIMPLEMENTED(); }


void CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssemblePrologue() { UNIMPLEMENTED(); }


void CodeGenerator::AssembleReturn() { UNIMPLEMENTED(); }


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}


void CodeGenerator::AddNopForSmiCodeInlining() { UNIMPLEMENTED(); }


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  UNIMPLEMENTED();
}

#endif  // !V8_TURBOFAN_BACKEND

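// Registers this out-of-line code fragment with the code generator; fragments
// are emitted after all regular blocks in GenerateCode(). A typical use in an
// architecture backend looks roughly like this (a sketch; OutOfLineFoo is a
// hypothetical subclass that overrides Generate()):
//
//   OutOfLineCode* ool = new (zone()) OutOfLineFoo(this, result);
//   __ j(overflow, ool->entry());
//   __ bind(ool->exit());
//
// GenerateCode() binds ool->entry(), calls Generate(), and then jumps back to
// ool->exit() if that label was bound in the main instruction stream.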
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}


OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8