// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
#define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_

#include "deoptimizer.h"
#include "mips/lithium-gap-resolver-mips.h"
#include "mips/lithium-mips.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "utils.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

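  // The frame is set up eagerly in the prologue whenever the code uses stack
  // slots, is not a stub, requires a frame, or makes calls outside deferred
  // code; otherwise a stub builds its frame lazily on the deferred path (see
  // NeedsDeferredFrame() and frame_is_built_).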
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
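  // Typical use inside a Do* handler (illustrative sketch only; the operand
  // accessors on |instr| below are hypothetical and depend on the concrete
  // LInstruction):
  //
  //   Register object = ToRegister(instr->object());
  //   Register key = EmitLoadRegister(instr->key(), scratch0());
  //   DoubleRegister value = ToDoubleRegister(instr->value());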

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

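  // Computes the MemOperand for a keyed element access: |base| plus either
  // |constant_key| scaled by the element size or |key| shifted by
  // |shift_size|, adjusted by the additional index and offset.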
  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int additional_index,
                                 int additional_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
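  // Each entry of LITHIUM_CONCRETE_INSTRUCTION_LIST (see mips/lithium-mips.h)
  // expands to one handler declaration, e.g. DECLARE_DO(Branch) yields
  //   void DoBranch(LBranch* node);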

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         A1State a1_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
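  // Illustrative use (sketch only; instr->environment() stands in for the
  // instruction's LEnvironment accessor): deoptimize when scratch0() holds a
  // non-zero value:
  //   DeoptimizeIf(ne, instr->environment(), scratch0(), Operand(zero_reg));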

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
                                              int arguments,
                                              Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr,
                       Condition condition,
                       Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool allow_undefined_as_nan,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env,
                        NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register& cmp1,
                         Operand& cmp2);
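  // Illustrative call pattern (sketch only; variable names are hypothetical):
  //   Register cmp1 = no_reg;
  //   Operand cmp2 = Operand(no_reg);
  //   Condition cond =
  //       EmitTypeofIs(true_label, false_label, input, type_name, cmp1, cmp2);
  //   EmitBranch(instr, cond, cmp1, cmp2);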

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    PushSafepointRegistersScope(LCodeGen* codegen,
                                Safepoint::Kind kind)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = kind;

      switch (codegen_->expected_safepoint_kind_) {
        case Safepoint::kWithRegisters: {
          StoreRegistersStateStub stub1(codegen_->masm_->isolate(),
                                        kDontSaveFPRegs);
          codegen_->masm_->push(ra);
          codegen_->masm_->CallStub(&stub1);
          break;
        }
        case Safepoint::kWithRegistersAndDoubles: {
          StoreRegistersStateStub stub2(codegen_->masm_->isolate(),
                                        kSaveFPRegs);
          codegen_->masm_->push(ra);
          codegen_->masm_->CallStub(&stub2);
          break;
        }
        default:
          UNREACHABLE();
      }
    }

    ~PushSafepointRegistersScope() {
      Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
      ASSERT((kind & Safepoint::kWithRegisters) != 0);
      switch (kind) {
        case Safepoint::kWithRegisters: {
          RestoreRegistersStateStub stub1(codegen_->masm_->isolate(),
                                          kDontSaveFPRegs);
          codegen_->masm_->push(ra);
          codegen_->masm_->CallStub(&stub1);
          break;
        }
        case Safepoint::kWithRegistersAndDoubles: {
          RestoreRegistersStateStub stub2(codegen_->masm_->isolate(),
                                          kSaveFPRegs);
          codegen_->masm_->push(ra);
          codegen_->masm_->CallStub(&stub2);
          break;
        }
        default:
          UNREACHABLE();
      }
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
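  // Usage sketch (illustrative only): deferred code that calls out wraps the
  // call in this RAII scope so registers are spilled around the safepoint,
  // e.g.
  //   {
  //     PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
  //     CallRuntimeFromDeferred(Runtime::kSomeId, 0, instr, instr->context());
  //   }
  // Runtime::kSomeId and instr->context() are placeholders, not real names.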

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  int instruction_index_;
};
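// A typical concrete subclass lives in the corresponding lithium-codegen
// .cc file and forwards to one of the DoDeferred* methods above; a sketch
// (using DoDeferredNumberTagD as an example):
//
//   class DeferredNumberTagD V8_FINAL : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       codegen()->DoDeferredNumberTagD(instr_);
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };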

} }  // namespace v8::internal

#endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_