Upstream version 9.38.198.0
[platform/framework/web/crosswalk.git] / src / v8 / src / mips / lithium-codegen-mips.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
6 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
7
8 #include "src/deoptimizer.h"
9 #include "src/lithium-codegen.h"
10 #include "src/mips/lithium-gap-resolver-mips.h"
11 #include "src/mips/lithium-mips.h"
12 #include "src/safepoint-table.h"
13 #include "src/scopes.h"
14 #include "src/utils.h"
15
16 namespace v8 {
17 namespace internal {
18
19 // Forward declarations.
20 class LDeferredCode;
21 class SafepointGenerator;
22
// Generates MIPS machine code for a function compiled through the
// Hydrogen/Lithium optimizing pipeline. Walks the LChunk instruction by
// instruction, emitting native code together with the side tables the
// runtime needs: deoptimization data, safepoints and frame translations.
class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        deopt_jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),  // -1 means no OSR entry has been generated yet.
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }


  // Returns the block that control actually flows to for |block_id|
  // (the chunk collapses empty blocks to their destinations).
  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  // True if |block_id|'s destination is the block emitted next, in which
  // case an explicit jump to it can be omitted.
  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  // Whether a stack frame must be built eagerly in the prologue. Only
  // certain stubs (deferred-calling, frameless) can avoid this.
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  // Whether a frame is built lazily, only on the deferred-code paths.
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // ra is saved as part of frame construction, so it is only live-unsaved
  // while no frame has been built.
  RAStatus GetRAState() const {
    return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
  }

  // Support for converting LOperands to assembler types.
  // LOperand must be a register.
  Register ToRegister(LOperand* op) const;

  // LOperand is loaded into scratch, unless already a register.
  Register EmitLoadRegister(LOperand* op, Register scratch);

  // LOperand must be a double register.
  DoubleRegister ToDoubleRegister(LOperand* op) const;

  // LOperand is loaded into dbl_scratch, unless already a double register.
  DoubleRegister EmitLoadDoubleRegister(LOperand* op,
                                        FloatRegister flt_scratch,
                                        DoubleRegister dbl_scratch);
  // Constant-operand conversions; callers must ensure the operand holds a
  // value representable in the requested form.
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op);
  MemOperand ToMemOperand(LOperand* op) const;
  // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
  MemOperand ToHighMemOperand(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred-code entry points, called from the out-of-line continuations
  // registered by the corresponding DoXxx instruction handlers.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);

  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register result,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Computes the address of a keyed element, folding a constant key or a
  // shifted key register into the operand when possible.
  MemOperand PrepareKeyedOperand(Register key,
                                 Register base,
                                 bool key_is_constant,
                                 int constant_key,
                                 int element_size,
                                 int shift_size,
                                 int base_offset);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  // Fixed scratch registers reserved for lithium code generation.
  Register scratch0() { return kLithiumScratchReg; }
  Register scratch1() { return kLithiumScratchReg2; }
  DoubleRegister double_scratch0() { return kLithiumScratchDouble; }

  LInstruction* GetNextInstruction();

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Save/restore caller-saved double registers around runtime calls.
  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateDeoptJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  // Convenience overload that resolves the function from its id.
  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void LoadContextFromDeferred(LOperand* context);
  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  // Tracks whether a1 already holds the call target, so reloading it can
  // be skipped.
  enum A1State {
    A1_UNINITIALIZED,
    A1_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in a1.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         A1State a1_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  // Emits a conditional bail-out to the deoptimizer; |src1| and |src2| are
  // the operands of the MIPS compare-and-branch that guards the deopt.
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));
  void DeoptimizeIf(Condition condition,
                    LEnvironment* environment,
                    Register src1 = zero_reg,
                    const Operand& src2 = Operand(zero_reg));

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  // Returns the index of |literal| in deoptimization_literals_, adding it
  // if not yet present.
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  DoubleRegister ToDoubleRegister(int index) const;

  MemOperand BuildSeqStringOperand(Register string,
                                   LOperand* index,
                                   String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr,
                  Condition condition,
                  Register src1,
                  const Operand& src2);
  template<class InstrType>
  void EmitBranchF(InstrType instr,
                   Condition condition,
                   FPURegister src1,
                   FPURegister src2);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr,
                       Condition condition,
                       Register src1,
                       const Operand& src2);
  template<class InstrType>
  void EmitFalseBranchF(InstrType instr,
                        Condition condition,
                        FPURegister src1,
                        FPURegister src2);
  void EmitCmpI(LOperand* left, LOperand* right);
  void EmitNumberUntagD(Register input,
                        DoubleRegister result,
                        bool allow_undefined_as_nan,
                        bool deoptimize_on_minus_zero,
                        LEnvironment* env,
                        NumberUntagDMode mode);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  // Returns two registers in cmp1 and cmp2 that can be used in the
  // Branch instruction after EmitTypeofIs.
  Condition EmitTypeofIs(Label* true_label,
                         Label* false_label,
                         Register input,
                         Handle<String> type_name,
                         Register* cmp1,
                         Operand* cmp2);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Register temp2,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp1, Register temp2);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
  // Emit optimized code for integer division.
  // Inputs are signed.
  // All registers are clobbered.
  // If 'remainder' is no_reg, it is not computed.
  void EmitSignedIntegerDivisionByConstant(Register result,
                                           Register dividend,
                                           int32_t divisor,
                                           Register remainder,
                                           Register scratch,
                                           LEnvironment* environment);


  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  // Per-elements-kind specializations of keyed load/store code generation.
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  // Deoptimization bookkeeping collected during code generation and
  // written out by PopulateDeoptimizationData.
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;  // Out-of-line code, emitted last.
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // RAII scope that calls the register save/restore stubs around a region
  // that must record safepoints with registers. The constructor pushes ra
  // (clobbered by CallStub) before each stub call.
  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;

      StoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      RestoreRegistersStateStub stub(codegen_->isolate());
      codegen_->masm_->push(ra);
      codegen_->masm_->CallStub(&stub);
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};
416
417
// Base class for a piece of out-of-line ("deferred") code. Subclasses
// implement Generate() with the slow-path code; construction registers the
// object with the code generator, which emits all deferred code after the
// main instruction stream (see LCodeGen::GenerateDeferredCode).
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  // Emits the body of the deferred code.
  virtual void Generate() = 0;
  // The lithium instruction this deferred code belongs to.
  virtual LInstruction* instr() = 0;

  // Overrides the default exit label, e.g. to jump somewhere other than
  // straight back into the main code.
  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;           // Jump target for entering the deferred code.
  Label exit_;            // Default jump target back into the main code.
  Label* external_exit_;  // Caller-supplied exit label, if any.
  int instruction_index_;
};
447
448 } }  // namespace v8::internal
449
450 #endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_