1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
6 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
8 #include "src/deoptimizer.h"
9 #include "src/lithium-codegen.h"
10 #include "src/mips64/lithium-gap-resolver-mips64.h"
11 #include "src/mips64/lithium-mips64.h"
12 #include "src/safepoint-table.h"
13 #include "src/scopes.h"
14 #include "src/utils.h"
19 // Forward declarations.
21 class SafepointGenerator;
// LCodeGen translates a platform-independent LChunk (Lithium instruction
// graph) into native MIPS64 machine code via the MacroAssembler, recording
// safepoints, deoptimization data, and translations along the way.
// NOTE(review): this copy is truncated — the embedded original line numbers
// have gaps (missing parameters, access specifiers, and closing braces);
// confirm any non-obvious detail against the upstream V8 source.
23 class LCodeGen: public LCodeGenBase {
// Constructor: seeds the zone-allocated work lists (deopts, jump table,
// deferred code, safepoints) and immediately collects literals for every
// inlined function so deopt data can reference them.
25 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
26 : LCodeGenBase(chunk, assembler, info),
27 deoptimizations_(4, info->zone()),
28 jump_table_(4, info->zone()),
29 inlined_function_count_(0),
30 scope_(info->scope()),
31 translations_(info->zone()),
32 deferred_(8, info->zone()),
34 frame_is_built_(false),
35 safepoints_(info->zone()),
37 expected_safepoint_kind_(Safepoint::kSimple) {
38 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Maps a block id to the block actually emitted (chunk may merge blocks).
42 int LookupDestination(int block_id) const {
43 return chunk()->LookupDestination(block_id);
// True when the jump target is the fall-through block, so no branch is needed.
46 bool IsNextEmittedBlock(int block_id) const {
47 return LookupDestination(block_id) == GetNextEmittedBlock();
// Frame predicates: an eager frame is built up front when there are spill
// slots, non-deferred calls, or the info explicitly requires one; a deferred
// frame is built lazily only inside deferred code paths.
50 bool NeedsEagerFrame() const {
51 return GetStackSlotCount() > 0 ||
52 info()->is_non_deferred_calling() ||
54 info()->requires_frame();
56 bool NeedsDeferredFrame() const {
57 return !NeedsEagerFrame() && info()->is_deferred_calling();
// Whether the return address (ra) has already been saved — it is saved as
// part of building the frame.
60 RAStatus GetRAState() const {
61 return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
64 // Support for converting LOperands to assembler types.
65 // LOperand must be a register.
66 Register ToRegister(LOperand* op) const;
68 // LOperand is loaded into scratch, unless already a register.
69 Register EmitLoadRegister(LOperand* op, Register scratch);
71 // LOperand must be a double register.
72 DoubleRegister ToDoubleRegister(LOperand* op) const;
74 // LOperand is loaded into dbl_scratch, unless already a double register.
75 DoubleRegister EmitLoadDoubleRegister(LOperand* op,
76 FloatRegister flt_scratch,
77 DoubleRegister dbl_scratch);
// Constant-operand accessors: each requires the operand to actually hold a
// value of the queried representation (checked by the Is* predicates below).
78 int64_t ToRepresentation_donotuse(LConstantOperand* op,
79 const Representation& r) const;
80 int32_t ToInteger32(LConstantOperand* op) const;
81 Smi* ToSmi(LConstantOperand* op) const;
82 double ToDouble(LConstantOperand* op) const;
83 Operand ToOperand(LOperand* op);
84 MemOperand ToMemOperand(LOperand* op) const;
85 // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
86 MemOperand ToHighMemOperand(LOperand* op) const;
88 bool IsInteger32(LConstantOperand* op) const;
89 bool IsSmi(LConstantOperand* op) const;
90 Handle<Object> ToHandle(LConstantOperand* op) const;
92 // Try to generate code for the entire chunk, but it may fail if the
93 // chunk contains constructs we cannot handle. Returns true if the
94 // code generation attempt succeeded.
96 // Finish the code by setting stack height, safepoint, and bailout
99 void FinishCode(Handle<Code> code);
// Deferred-code bodies: the slow paths emitted out of line for the
// corresponding Lithium instructions (called from LDeferredCode::Generate).
101 void DoDeferredNumberTagD(LNumberTagD* instr);
103 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
104 void DoDeferredNumberTagIU(LInstruction* instr,
108 IntegerSignedness signedness);
110 void DoDeferredTaggedToI(LTaggedToI* instr);
111 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
112 void DoDeferredStackCheck(LStackCheck* instr);
113 void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
114 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
115 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
116 void DoDeferredAllocate(LAllocate* instr);
118 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
119 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
124 // Parallel move support.
125 void DoParallelMove(LParallelMove* move);
126 void DoGap(LGap* instr);
// Computes the effective address for a keyed (indexed) element access.
128 MemOperand PrepareKeyedOperand(Register key,
130 bool key_is_constant,
136 // Emit frame translation commands for an environment.
137 void WriteTranslation(LEnvironment* environment, Translation* translation);
139 // Declare methods that deal with the individual node types.
140 #define DECLARE_DO(type) void Do##type(L##type* node);
141 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
145 LanguageMode language_mode() const { return info()->language_mode(); }
147 Scope* scope() const { return scope_; }
// Fixed scratch registers reserved for Lithium codegen on this platform.
149 Register scratch0() { return kLithiumScratchReg; }
150 Register scratch1() { return kLithiumScratchReg2; }
151 DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
153 LInstruction* GetNextInstruction();
155 void EmitClassOfTest(Label* if_true,
157 Handle<String> class_name,
160 Register temporary2);
162 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
// Queues a deferred-code stub to be emitted by GenerateDeferredCode().
164 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
166 void SaveCallerDoubles();
167 void RestoreCallerDoubles();
169 // Code generation passes. Returns true if code generation should
171 void GenerateBodyInstructionPre(LInstruction* instr) override;
172 bool GeneratePrologue();
173 bool GenerateDeferredCode();
174 bool GenerateJumpTable();
175 bool GenerateSafepointTable();
177 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
178 void GenerateOsrPrologue();
// SafepointMode enumerators (enum header lost to truncation above):
181 RECORD_SIMPLE_SAFEPOINT,
182 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
// Call helpers: each records the required safepoint for the instruction.
185 void CallCode(Handle<Code> code,
186 RelocInfo::Mode mode,
187 LInstruction* instr);
189 void CallCodeGeneric(Handle<Code> code,
190 RelocInfo::Mode mode,
192 SafepointMode safepoint_mode);
194 void CallRuntime(const Runtime::Function* function,
197 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
// Convenience overload: resolves the runtime function from its id, then
// forwards to the overload above.
199 void CallRuntime(Runtime::FunctionId id,
201 LInstruction* instr) {
202 const Runtime::Function* function = Runtime::FunctionForId(id);
203 CallRuntime(function, num_arguments, instr);
206 void LoadContextFromDeferred(LOperand* context);
207 void CallRuntimeFromDeferred(Runtime::FunctionId id,
212 // Generate a direct call to a known function. Expects the function
214 void CallKnownFunction(Handle<JSFunction> function,
215 int formal_parameter_count, int arity,
216 LInstruction* instr);
218 void RecordSafepointWithLazyDeopt(LInstruction* instr,
219 SafepointMode safepoint_mode);
221 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
222 Safepoint::DeoptMode mode);
// Emits a conditional deopt: bails out of optimized code when 'condition'
// holds for src1 vs src2 (defaults compare zero_reg against zero, i.e. an
// unconditional deopt when condition is eq).
223 void DeoptimizeIf(Condition condition, LInstruction* instr,
224 Deoptimizer::DeoptReason deopt_reason,
225 Deoptimizer::BailoutType bailout_type,
226 Register src1 = zero_reg,
227 const Operand& src2 = Operand(zero_reg));
// (Overload header lost to truncation; these are its trailing parameters.)
229 Condition condition, LInstruction* instr,
230 Deoptimizer::DeoptReason deopt_reason = Deoptimizer::kNoReason,
231 Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg));
233 void AddToTranslation(LEnvironment* environment,
234 Translation* translation,
238 int* object_index_pointer,
239 int* dematerialized_index_pointer);
240 void PopulateDeoptimizationData(Handle<Code> code);
242 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Index-based register lookups (indices come from LOperand encodings).
244 Register ToRegister(int index) const;
245 DoubleRegister ToDoubleRegister(int index) const;
247 MemOperand BuildSeqStringOperand(Register string,
249 String::Encoding encoding);
251 void EmitIntegerMathAbs(LMathAbs* instr);
252 void EmitSmiMathAbs(LMathAbs* instr);
254 // Support for recording safepoint and position information.
255 void RecordSafepoint(LPointerMap* pointers,
256 Safepoint::Kind kind,
258 Safepoint::DeoptMode mode);
259 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
260 void RecordSafepoint(Safepoint::DeoptMode mode);
261 void RecordSafepointWithRegisters(LPointerMap* pointers,
263 Safepoint::DeoptMode mode);
265 void RecordAndWritePosition(int position) override;
267 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
268 void EmitGoto(int block);
270 // EmitBranch expects to be the last instruction of a block.
271 template<class InstrType>
272 void EmitBranch(InstrType instr,
275 const Operand& src2);
276 template<class InstrType>
277 void EmitBranchF(InstrType instr,
281 template <class InstrType>
282 void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
283 const Operand& src2);
284 template <class InstrType>
285 void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
286 const Operand& src2);
287 template<class InstrType>
288 void EmitFalseBranchF(InstrType instr,
292 void EmitCmpI(LOperand* left, LOperand* right);
293 void EmitNumberUntagD(LNumberUntagD* instr, Register input,
294 DoubleRegister result, NumberUntagDMode mode);
296 // Emits optimized code for typeof x == "y". Modifies input register.
297 // Returns the condition on which a final split to
298 // true and false label should be made, to optimize fallthrough.
299 // Returns two registers in cmp1 and cmp2 that can be used in the
300 // Branch instruction after EmitTypeofIs.
301 Condition EmitTypeofIs(Label* true_label,
304 Handle<String> type_name,
308 // Emits optimized code for %_IsString(x). Preserves input register.
309 // Returns the condition on which a final split to
310 // true and false label should be made, to optimize fallthrough.
311 Condition EmitIsString(Register input,
313 Label* is_not_string,
314 SmiCheck check_needed);
316 // Emits optimized code for %_IsConstructCall().
317 // Caller should branch on equal condition.
318 void EmitIsConstructCall(Register temp1, Register temp2);
320 // Emits optimized code to deep-copy the contents of statically known
321 // object graphs (e.g. object literal boilerplate).
322 void EmitDeepCopy(Handle<JSObject> object,
326 AllocationSiteMode mode);
327 // Emit optimized code for integer division.
328 // Inputs are signed.
329 // All registers are clobbered.
330 // If 'remainder' is no_reg, it is not computed.
331 void EmitSignedIntegerDivisionByConstant(Register result,
336 LEnvironment* environment);
339 void EnsureSpaceForLazyDeopt(int space_needed) override;
// Per-elements-kind keyed load/store emitters, dispatched from
// DoLoadKeyed / DoStoreKeyed.
340 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
341 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
342 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
343 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
344 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
345 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// (Template headers for these two lost to truncation above.)
348 void EmitVectorLoadICRegisters(T* instr);
350 void EmitVectorStoreICRegisters(T* instr);
// --- Member state (zone-allocated lists live as long as the compilation) ---
352 ZoneList<LEnvironment*> deoptimizations_;
353 ZoneList<Deoptimizer::JumpTableEntry*> jump_table_;
354 int inlined_function_count_;
356 TranslationBuffer translations_;
357 ZoneList<LDeferredCode*> deferred_;
// Whether the stack frame has been constructed yet (see GetRAState()).
359 bool frame_is_built_;
361 // Builder that keeps track of safepoints in the code. The table
362 // itself is emitted at the end of the generated code.
363 SafepointTableBuilder safepoints_;
365 // Compiler from a set of parallel moves to a sequential list of moves.
366 LGapResolver resolver_;
368 Safepoint::Kind expected_safepoint_kind_;
// RAII scope that switches safepoint recording to kWithRegisters: the
// constructor saves register state (pushes ra, calls the store stub) and
// the destructor restores it symmetrically, flipping the expected
// safepoint kind back to kSimple.
370 class PushSafepointRegistersScope final BASE_EMBEDDED {
372 explicit PushSafepointRegistersScope(LCodeGen* codegen)
373 : codegen_(codegen) {
374 DCHECK(codegen_->info()->is_calling());
375 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
376 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
378 StoreRegistersStateStub stub(codegen_->isolate());
379 codegen_->masm_->push(ra);
380 codegen_->masm_->CallStub(&stub);
383 ~PushSafepointRegistersScope() {
384 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
385 RestoreRegistersStateStub stub(codegen_->isolate());
386 codegen_->masm_->push(ra);
387 codegen_->masm_->CallStub(&stub);
388 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
395 friend class LDeferredCode;
396 friend class LEnvironment;
397 friend class SafepointGenerator;
398 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
// Base class for out-of-line (slow-path) code attached to a Lithium
// instruction. Constructing an instance registers it with the codegen,
// which later emits Generate() after the main instruction stream; control
// jumps in via entry() and back out via exit().
// NOTE(review): this copy is truncated — the constructor's initializer list
// is missing its leading entries (gaps in the embedded line numbers).
402 class LDeferredCode : public ZoneObject {
// Registers this deferred stub with the codegen and remembers which
// instruction it belongs to (for lazy-deopt bookkeeping).
404 explicit LDeferredCode(LCodeGen* codegen)
406 external_exit_(NULL),
407 instruction_index_(codegen->current_instruction_) {
408 codegen->AddDeferredCode(this);
411 virtual ~LDeferredCode() {}
// Subclasses emit the actual slow-path code here.
412 virtual void Generate() = 0;
413 virtual LInstruction* instr() = 0;
// Optionally redirect the return jump to a caller-provided label instead
// of the default internal exit label.
415 void SetExit(Label* exit) { external_exit_ = exit; }
416 Label* entry() { return &entry_; }
417 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
418 int instruction_index() const { return instruction_index_; }
421 LCodeGen* codegen() const { return codegen_; }
422 MacroAssembler* masm() const { return codegen_->masm(); }
// Non-NULL only after SetExit(); takes precedence over the internal label.
428 Label* external_exit_;
429 int instruction_index_;
432 } } // namespace v8::internal
434 #endif // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_