// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/mips/lithium-gap-resolver-mips.h"
#include "src/mips/lithium-mips.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op,
                           const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr, Register result,
                                   Register object, Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  MemOperand PrepareKeyedOperand(Register key, Register base,
                                 bool key_is_constant, int constant_key,
                                 int element_size, int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  LanguageMode language_mode() const { return info()->language_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Handle<String> class_name,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) override;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id, int argc,
                               LInstruction* instr, LOperand* context);

  // Generate a direct call to a known function. Expects the function
  // object to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count, int arity,
                         LInstruction* instr);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition, LInstruction* instr,
                    Deoptimizer::DeoptReason deopt_reason,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(
      Condition condition, LInstruction* instr,
      Deoptimizer::DeoptReason deopt_reason = Deoptimizer::kNoReason,
      Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg));
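  // Illustrative only (not part of the original header): a Do* handler in
  // lithium-codegen-mips.cc typically materializes a check into a register
  // and then conditionally deoptimizes against it, roughly
  //   __ And(scratch, input, Operand(kSmiTagMask));
  //   DeoptimizeIf(eq, instr, Deoptimizer::kSmi, scratch, Operand(zero_reg));
  // where 'input' and 'scratch' are hypothetical registers picked by the
  // handler.
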
  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) override;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template <class InstrType>
  void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
                      const Operand& src2);
  template <class InstrType>
  void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(LNumberUntagD* instr, Register input,
                        DoubleRegister result, NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);
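  // Illustrative only (not part of the original header): the intended call
  // pattern pairs EmitTypeofIs with EmitBranch, roughly
  //   Register cmp1 = no_reg;
  //   Operand cmp2 = Operand(no_reg);
  //   Condition final_condition =
  //       EmitTypeofIs(true_label, false_label, input, type_name,
  //                    &cmp1, &cmp2);
  //   EmitBranch(instr, final_condition, cmp1, cmp2);
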
  // Emits optimized code for %_IsObject(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           LEnvironment* environment);

  void EnsureSpaceForLazyDeopt(int space_needed) override;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  template <class T>
  void EmitVectorLoadICRegisters(T* instr);
  template <class T>
  void EmitVectorStoreICRegisters(T* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;

  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

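  // Illustrative only (not part of the original header): deferred code that
  // must keep every register visible to the GC brackets its call site with
  // the scope below, roughly
  //   {
  //     PushSafepointRegistersScope scope(this);
  //     // ... emit the call ...
  //     RecordSafepointWithRegisters(instr->pointer_map(),
  //                                  Safepoint::kNoLazyDeopt);
  //   }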
  class PushSafepointRegistersScope final BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};

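// Illustrative only (not part of the original header): concrete deferred code
// is typically declared locally in lithium-codegen-mips.cc as a small
// subclass of LDeferredCode, along the lines of
//   class DeferredStackCheck final : public LDeferredCode {
//    public:
//     DeferredStackCheck(LCodeGen* codegen, LStackCheck* instr)
//         : LDeferredCode(codegen), instr_(instr) {}
//     void Generate() override { codegen()->DoDeferredStackCheck(instr_); }
//     LInstruction* instr() override { return instr_; }
//    private:
//     LStackCheck* instr_;
//   };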
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_