1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
6 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
8 #include "deoptimizer.h"
9 #include "mips/lithium-gap-resolver-mips.h"
10 #include "mips/lithium-mips.h"
11 #include "lithium-codegen.h"
12 #include "safepoint-table.h"
19 // Forward declarations.
21 class SafepointGenerator;
// LCodeGen translates a platform-independent LChunk (Lithium IR) into MIPS
// machine code via the MacroAssembler, tracking deoptimization data,
// safepoints, and deferred code along the way.
// NOTE(review): this view of the file is incomplete (original line numbers
// show elided lines, including access specifiers and some closing braces);
// comments below describe only what the visible declarations establish.
23 class LCodeGen: public LCodeGenBase {
// Constructor: pre-sizes the zone-allocated bookkeeping lists and seeds the
// deoptimization literals with the inlined functions' literals.
25 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
26 : LCodeGenBase(chunk, assembler, info),
27 deoptimizations_(4, info->zone()),
28 deopt_jump_table_(4, info->zone()),
29 deoptimization_literals_(8, info->zone()),
30 inlined_function_count_(0),
31 scope_(info->scope()),
32 translations_(info->zone()),
33 deferred_(8, info->zone()),
35 frame_is_built_(false),
36 safepoints_(info->zone()),
38 expected_safepoint_kind_(Safepoint::kSimple) {
39 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Maps a block id to the block it falls through to (delegates to the chunk).
43 int LookupDestination(int block_id) const {
44 return chunk()->LookupDestination(block_id);
// True when the given block's destination is the next block to be emitted,
// i.e. a jump to it can be elided.
47 bool IsNextEmittedBlock(int block_id) const {
48 return LookupDestination(block_id) == GetNextEmittedBlock();
// A frame must be set up eagerly in the prologue when the function uses
// stack slots, performs non-deferred calls, or otherwise requires a frame.
51 bool NeedsEagerFrame() const {
52 return GetStackSlotCount() > 0 ||
53 info()->is_non_deferred_calling() ||
55 info()->requires_frame();
// A frame is built lazily (only inside deferred code) when no eager frame is
// needed but deferred code performs calls.
57 bool NeedsDeferredFrame() const {
58 return !NeedsEagerFrame() && info()->is_deferred_calling();
// Reports whether the return address register (ra) has already been saved,
// which is the case once the frame has been built.
61 RAStatus GetRAState() const {
62 return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
65 // Support for converting LOperands to assembler types.
66 // LOperand must be a register.
67 Register ToRegister(LOperand* op) const;
69 // LOperand is loaded into scratch, unless already a register.
70 Register EmitLoadRegister(LOperand* op, Register scratch);
72 // LOperand must be a double register.
73 DoubleRegister ToDoubleRegister(LOperand* op) const;
75 // LOperand is loaded into dbl_scratch, unless already a double register.
76 DoubleRegister EmitLoadDoubleRegister(LOperand* op,
77 FloatRegister flt_scratch,
78 DoubleRegister dbl_scratch);
// Constant-operand accessors: extract the constant value in the requested
// form (representation-dependent int32, int32, Smi, or double).
79 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
80 int32_t ToInteger32(LConstantOperand* op) const;
81 Smi* ToSmi(LConstantOperand* op) const;
82 double ToDouble(LConstantOperand* op) const;
83 Operand ToOperand(LOperand* op);
84 MemOperand ToMemOperand(LOperand* op) const;
85 // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
86 MemOperand ToHighMemOperand(LOperand* op) const;
// Predicates/conversions for constant operands.
88 bool IsInteger32(LConstantOperand* op) const;
89 bool IsSmi(LConstantOperand* op) const;
90 Handle<Object> ToHandle(LConstantOperand* op) const;
92 // Try to generate code for the entire chunk, but it may fail if the
93 // chunk contains constructs we cannot handle. Returns true if the
94 // code generation attempt succeeded.
97 // Finish the code by setting stack height, safepoint, and bailout
99 void FinishCode(Handle<Code> code);
// Deferred code support: out-of-line continuations for instructions whose
// slow paths are emitted after the main instruction stream.
101 void DoDeferredNumberTagD(LNumberTagD* instr);
103 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
104 void DoDeferredNumberTagIU(LInstruction* instr,
108 IntegerSignedness signedness);
110 void DoDeferredTaggedToI(LTaggedToI* instr);
111 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
112 void DoDeferredStackCheck(LStackCheck* instr);
113 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
114 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
115 void DoDeferredAllocate(LAllocate* instr);
116 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
119 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
120 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
125 // Parallel move support.
126 void DoParallelMove(LParallelMove* move);
127 void DoGap(LGap* instr);
// Computes the address for a keyed load/store; some parameter lines are
// elided in this view.
129 MemOperand PrepareKeyedOperand(Register key,
131 bool key_is_constant,
135 int additional_index,
136 int additional_offset);
138 // Emit frame translation commands for an environment.
139 void WriteTranslation(LEnvironment* environment, Translation* translation);
141 // Declare methods that deal with the individual node types.
142 #define DECLARE_DO(type) void Do##type(L##type* node);
143 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
// Simple accessors delegating to the compilation info / Lithium scratch
// register aliases.
147 StrictMode strict_mode() const { return info()->strict_mode(); }
149 Scope* scope() const { return scope_; }
151 Register scratch0() { return kLithiumScratchReg; }
152 Register scratch1() { return kLithiumScratchReg2; }
153 DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
155 LInstruction* GetNextInstruction();
157 void EmitClassOfTest(Label* if_true,
159 Handle<String> class_name,
162 Register temporary2);
164 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
// Registers a deferred-code chunk to be emitted by GenerateDeferredCode().
166 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
168 void SaveCallerDoubles();
169 void RestoreCallerDoubles();
171 // Code generation passes. Returns true if code generation should
173 void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
174 bool GeneratePrologue();
175 bool GenerateDeferredCode();
176 bool GenerateDeoptJumpTable();
177 bool GenerateSafepointTable();
179 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
180 void GenerateOsrPrologue();
// Safepoint-mode enumerators (the enum header line is elided in this view).
183 RECORD_SIMPLE_SAFEPOINT,
184 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
// Call helpers: emit a call to code/runtime and record the required
// safepoint and deopt information.
187 void CallCode(Handle<Code> code,
188 RelocInfo::Mode mode,
189 LInstruction* instr);
191 void CallCodeGeneric(Handle<Code> code,
192 RelocInfo::Mode mode,
194 SafepointMode safepoint_mode);
196 void CallRuntime(const Runtime::Function* function,
199 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
// Convenience overload: resolves the runtime function from its id.
201 void CallRuntime(Runtime::FunctionId id,
203 LInstruction* instr) {
204 const Runtime::Function* function = Runtime::FunctionForId(id);
205 CallRuntime(function, num_arguments, instr);
208 void LoadContextFromDeferred(LOperand* context);
209 void CallRuntimeFromDeferred(Runtime::FunctionId id,
219 // Generate a direct call to a known function. Expects the function
221 void CallKnownFunction(Handle<JSFunction> function,
222 int formal_parameter_count,
227 void RecordSafepointWithLazyDeopt(LInstruction* instr,
228 SafepointMode safepoint_mode);
230 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
231 Safepoint::DeoptMode mode);
// Emits a conditional deoptimization: bails out when 'condition' holds for
// the src1/src2 comparison (defaults compare zero_reg against zero_reg).
232 void DeoptimizeIf(Condition condition,
233 LEnvironment* environment,
234 Deoptimizer::BailoutType bailout_type,
235 Register src1 = zero_reg,
236 const Operand& src2 = Operand(zero_reg));
237 void DeoptimizeIf(Condition condition,
238 LEnvironment* environment,
239 Register src1 = zero_reg,
240 const Operand& src2 = Operand(zero_reg));
// Deoptimization-data plumbing: builds translations and the literal table
// embedded in the generated Code object.
242 void AddToTranslation(LEnvironment* environment,
243 Translation* translation,
247 int* object_index_pointer,
248 int* dematerialized_index_pointer);
249 void PopulateDeoptimizationData(Handle<Code> code);
250 int DefineDeoptimizationLiteral(Handle<Object> literal);
252 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Index-based register accessors (used by the deopt/translation machinery).
254 Register ToRegister(int index) const;
255 DoubleRegister ToDoubleRegister(int index) const;
257 MemOperand BuildSeqStringOperand(Register string,
259 String::Encoding encoding);
261 void EmitIntegerMathAbs(LMathAbs* instr);
263 // Support for recording safepoint and position information.
264 void RecordSafepoint(LPointerMap* pointers,
265 Safepoint::Kind kind,
267 Safepoint::DeoptMode mode);
268 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
269 void RecordSafepoint(Safepoint::DeoptMode mode);
270 void RecordSafepointWithRegisters(LPointerMap* pointers,
272 Safepoint::DeoptMode mode);
273 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
275 Safepoint::DeoptMode mode);
277 void RecordAndWritePosition(int position) V8_OVERRIDE;
279 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
280 void EmitGoto(int block);
282 // EmitBranch expects to be the last instruction of a block.
283 template<class InstrType>
284 void EmitBranch(InstrType instr,
287 const Operand& src2);
288 template<class InstrType>
289 void EmitBranchF(InstrType instr,
293 template<class InstrType>
294 void EmitFalseBranch(InstrType instr,
297 const Operand& src2);
298 template<class InstrType>
299 void EmitFalseBranchF(InstrType instr,
303 void EmitCmpI(LOperand* left, LOperand* right);
// Converts a tagged number in 'input' to a double in 'result'; behavior for
// undefined and -0 is controlled by the flags and 'mode'.
304 void EmitNumberUntagD(Register input,
305 DoubleRegister result,
306 bool allow_undefined_as_nan,
307 bool deoptimize_on_minus_zero,
309 NumberUntagDMode mode);
311 // Emits optimized code for typeof x == "y". Modifies input register.
312 // Returns the condition on which a final split to
313 // true and false label should be made, to optimize fallthrough.
314 // Returns two registers in cmp1 and cmp2 that can be used in the
315 // Branch instruction after EmitTypeofIs.
316 Condition EmitTypeofIs(Label* true_label,
319 Handle<String> type_name,
323 // Emits optimized code for %_IsObject(x). Preserves input register.
324 // Returns the condition on which a final split to
325 // true and false label should be made, to optimize fallthrough.
326 Condition EmitIsObject(Register input,
329 Label* is_not_object,
332 // Emits optimized code for %_IsString(x). Preserves input register.
333 // Returns the condition on which a final split to
334 // true and false label should be made, to optimize fallthrough.
335 Condition EmitIsString(Register input,
337 Label* is_not_string,
338 SmiCheck check_needed);
340 // Emits optimized code for %_IsConstructCall().
341 // Caller should branch on equal condition.
342 void EmitIsConstructCall(Register temp1, Register temp2);
344 // Emits optimized code to deep-copy the contents of statically known
345 // object graphs (e.g. object literal boilerplate).
346 void EmitDeepCopy(Handle<JSObject> object,
350 AllocationSiteMode mode);
351 // Emit optimized code for integer division.
352 // Inputs are signed.
353 // All registers are clobbered.
354 // If 'remainder' is no_reg, it is not computed.
355 void EmitSignedIntegerDivisionByConstant(Register result,
360 LEnvironment* environment);
363 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
// Keyed load/store specializations by backing-store kind.
364 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
365 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
366 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
367 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
368 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
369 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// Bookkeeping state accumulated during code generation.
371 ZoneList<LEnvironment*> deoptimizations_;
372 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
373 ZoneList<Handle<Object> > deoptimization_literals_;
374 int inlined_function_count_;
376 TranslationBuffer translations_;
377 ZoneList<LDeferredCode*> deferred_;
// Whether the stack frame has been set up yet (see NeedsDeferredFrame()).
379 bool frame_is_built_;
381 // Builder that keeps track of safepoints in the code. The table
382 // itself is emitted at the end of the generated code.
383 SafepointTableBuilder safepoints_;
385 // Compiler from a set of parallel moves to a sequential list of moves.
386 LGapResolver resolver_;
388 Safepoint::Kind expected_safepoint_kind_;
// RAII scope that switches the codegen into a register-saving safepoint
// kind: the constructor pushes ra and calls a StoreRegistersStateStub, the
// destructor calls the matching RestoreRegistersStateStub and resets the
// expected safepoint kind back to kSimple.
390 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
392 PushSafepointRegistersScope(LCodeGen* codegen,
393 Safepoint::Kind kind)
394 : codegen_(codegen) {
395 ASSERT(codegen_->info()->is_calling());
// Scopes must not nest: only entered from the simple safepoint state.
396 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
397 codegen_->expected_safepoint_kind_ = kind;
399 switch (codegen_->expected_safepoint_kind_) {
400 case Safepoint::kWithRegisters: {
401 StoreRegistersStateStub stub1(codegen_->masm_->isolate(),
403 codegen_->masm_->push(ra);
404 codegen_->masm_->CallStub(&stub1);
407 case Safepoint::kWithRegistersAndDoubles: {
408 StoreRegistersStateStub stub2(codegen_->masm_->isolate(),
410 codegen_->masm_->push(ra);
411 codegen_->masm_->CallStub(&stub2);
419 ~PushSafepointRegistersScope() {
420 Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
// Must currently be in one of the register-saving kinds.
421 ASSERT((kind & Safepoint::kWithRegisters) != 0);
423 case Safepoint::kWithRegisters: {
424 RestoreRegistersStateStub stub1(codegen_->masm_->isolate(),
426 codegen_->masm_->push(ra);
427 codegen_->masm_->CallStub(&stub1);
430 case Safepoint::kWithRegistersAndDoubles: {
431 RestoreRegistersStateStub stub2(codegen_->masm_->isolate(),
433 codegen_->masm_->push(ra);
434 codegen_->masm_->CallStub(&stub2);
440 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
447 friend class LDeferredCode;
448 friend class LEnvironment;
449 friend class SafepointGenerator;
450 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
// Base class for out-of-line (deferred) code sections. Subclasses implement
// Generate() to emit the slow path; construction registers the object with
// the code generator (AddDeferredCode) and records the index of the
// instruction it belongs to.
454 class LDeferredCode : public ZoneObject {
456 explicit LDeferredCode(LCodeGen* codegen)
458 external_exit_(NULL),
459 instruction_index_(codegen->current_instruction_) {
460 codegen->AddDeferredCode(this);
463 virtual ~LDeferredCode() {}
464 virtual void Generate() = 0;
465 virtual LInstruction* instr() = 0;
// Lets the owning instruction redirect the exit to an external label.
467 void SetExit(Label* exit) { external_exit_ = exit; }
468 Label* entry() { return &entry_; }
// Uses the externally supplied exit label if one was set, otherwise the
// internal exit label.
469 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
470 int instruction_index() const { return instruction_index_; }
473 LCodeGen* codegen() const { return codegen_; }
474 MacroAssembler* masm() const { return codegen_->masm(); }
480 Label* external_exit_;
481 int instruction_index_;
484 } } // namespace v8::internal
486 #endif // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_