Commit: "Correctify instanceof and make it optimizable."
File: src/mips/lithium-codegen-mips.h (from platform/upstream/v8.git)
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
6 #define V8_MIPS_LITHIUM_CODEGEN_MIPS_H_
7
8 #include "src/deoptimizer.h"
9 #include "src/lithium-codegen.h"
10 #include "src/mips/lithium-gap-resolver-mips.h"
11 #include "src/mips/lithium-mips.h"
12 #include "src/safepoint-table.h"
13 #include "src/scopes.h"
14 #include "src/utils.h"
15
16 namespace v8 {
17 namespace internal {
18
19 // Forward declarations.
20 class LDeferredCode;
21 class SafepointGenerator;
22
23 class LCodeGen: public LCodeGenBase {
24  public:
25   LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
26       : LCodeGenBase(chunk, assembler, info),
27         deoptimizations_(4, info->zone()),
28         jump_table_(4, info->zone()),
29         inlined_function_count_(0),
30         scope_(info->scope()),
31         translations_(info->zone()),
32         deferred_(8, info->zone()),
33         osr_pc_offset_(-1),
34         frame_is_built_(false),
35         safepoints_(info->zone()),
36         resolver_(this),
37         expected_safepoint_kind_(Safepoint::kSimple) {
38     PopulateDeoptimizationLiteralsWithInlinedFunctions();
39   }
40
41
42   int LookupDestination(int block_id) const {
43     return chunk()->LookupDestination(block_id);
44   }
45
46   bool IsNextEmittedBlock(int block_id) const {
47     return LookupDestination(block_id) == GetNextEmittedBlock();
48   }
49
50   bool NeedsEagerFrame() const {
51     return GetStackSlotCount() > 0 ||
52         info()->is_non_deferred_calling() ||
53         !info()->IsStub() ||
54         info()->requires_frame();
55   }
56   bool NeedsDeferredFrame() const {
57     return !NeedsEagerFrame() && info()->is_deferred_calling();
58   }
59
60   RAStatus GetRAState() const {
61     return frame_is_built_ ? kRAHasBeenSaved : kRAHasNotBeenSaved;
62   }
63
64   // Support for converting LOperands to assembler types.
65   // LOperand must be a register.
66   Register ToRegister(LOperand* op) const;
67
68   // LOperand is loaded into scratch, unless already a register.
69   Register EmitLoadRegister(LOperand* op, Register scratch);
70
71   // LOperand must be a double register.
72   DoubleRegister ToDoubleRegister(LOperand* op) const;
73
74   // LOperand is loaded into dbl_scratch, unless already a double register.
75   DoubleRegister EmitLoadDoubleRegister(LOperand* op,
76                                         FloatRegister flt_scratch,
77                                         DoubleRegister dbl_scratch);
78   int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
79   int32_t ToInteger32(LConstantOperand* op) const;
80   Smi* ToSmi(LConstantOperand* op) const;
81   double ToDouble(LConstantOperand* op) const;
82   Operand ToOperand(LOperand* op);
83   MemOperand ToMemOperand(LOperand* op) const;
84   // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
85   MemOperand ToHighMemOperand(LOperand* op) const;
86
87   bool IsInteger32(LConstantOperand* op) const;
88   bool IsSmi(LConstantOperand* op) const;
89   Handle<Object> ToHandle(LConstantOperand* op) const;
90
91   // Try to generate code for the entire chunk, but it may fail if the
92   // chunk contains constructs we cannot handle. Returns true if the
93   // code generation attempt succeeded.
94   bool GenerateCode();
95
96   // Finish the code by setting stack height, safepoint, and bailout
97   // information on it.
98   void FinishCode(Handle<Code> code);
99
100   void DoDeferredNumberTagD(LNumberTagD* instr);
101
102   enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
103   void DoDeferredNumberTagIU(LInstruction* instr,
104                              LOperand* value,
105                              LOperand* temp1,
106                              LOperand* temp2,
107                              IntegerSignedness signedness);
108
109   void DoDeferredTaggedToI(LTaggedToI* instr);
110   void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
111   void DoDeferredStackCheck(LStackCheck* instr);
112   void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
113   void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
114   void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
115   void DoDeferredAllocate(LAllocate* instr);
116   void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
117   void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
118                                    Register result,
119                                    Register object,
120                                    Register index);
121
122   // Parallel move support.
123   void DoParallelMove(LParallelMove* move);
124   void DoGap(LGap* instr);
125
126   MemOperand PrepareKeyedOperand(Register key,
127                                  Register base,
128                                  bool key_is_constant,
129                                  int constant_key,
130                                  int element_size,
131                                  int shift_size,
132                                  int base_offset);
133
134   // Emit frame translation commands for an environment.
135   void WriteTranslation(LEnvironment* environment, Translation* translation);
136
137   // Declare methods that deal with the individual node types.
138 #define DECLARE_DO(type) void Do##type(L##type* node);
139   LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
140 #undef DECLARE_DO
141
142  private:
143   LanguageMode language_mode() const { return info()->language_mode(); }
144
145   Scope* scope() const { return scope_; }
146
147   Register scratch0() { return kLithiumScratchReg; }
148   Register scratch1() { return kLithiumScratchReg2; }
149   DoubleRegister double_scratch0() { return kLithiumScratchDouble; }
150
151   LInstruction* GetNextInstruction();
152
153   void EmitClassOfTest(Label* if_true,
154                        Label* if_false,
155                        Handle<String> class_name,
156                        Register input,
157                        Register temporary,
158                        Register temporary2);
159
160   int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
161
162   void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
163
164   void SaveCallerDoubles();
165   void RestoreCallerDoubles();
166
167   // Code generation passes.  Returns true if code generation should
168   // continue.
169   void GenerateBodyInstructionPre(LInstruction* instr) override;
170   bool GeneratePrologue();
171   bool GenerateDeferredCode();
172   bool GenerateJumpTable();
173   bool GenerateSafepointTable();
174
175   // Generates the custom OSR entrypoint and sets the osr_pc_offset.
176   void GenerateOsrPrologue();
177
178   enum SafepointMode {
179     RECORD_SIMPLE_SAFEPOINT,
180     RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
181   };
182
183   void CallCode(Handle<Code> code,
184                 RelocInfo::Mode mode,
185                 LInstruction* instr);
186
187   void CallCodeGeneric(Handle<Code> code,
188                        RelocInfo::Mode mode,
189                        LInstruction* instr,
190                        SafepointMode safepoint_mode);
191
192   void CallRuntime(const Runtime::Function* function,
193                    int num_arguments,
194                    LInstruction* instr,
195                    SaveFPRegsMode save_doubles = kDontSaveFPRegs);
196
197   void CallRuntime(Runtime::FunctionId id,
198                    int num_arguments,
199                    LInstruction* instr) {
200     const Runtime::Function* function = Runtime::FunctionForId(id);
201     CallRuntime(function, num_arguments, instr);
202   }
203
204   void LoadContextFromDeferred(LOperand* context);
205   void CallRuntimeFromDeferred(Runtime::FunctionId id,
206                                int argc,
207                                LInstruction* instr,
208                                LOperand* context);
209
210   // Generate a direct call to a known function.  Expects the function
211   // to be in a1.
212   void CallKnownFunction(Handle<JSFunction> function,
213                          int formal_parameter_count, int arity,
214                          LInstruction* instr);
215
216   void RecordSafepointWithLazyDeopt(LInstruction* instr,
217                                     SafepointMode safepoint_mode);
218
219   void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
220                                             Safepoint::DeoptMode mode);
221   void DeoptimizeIf(Condition condition, LInstruction* instr,
222                     Deoptimizer::DeoptReason deopt_reason,
223                     Deoptimizer::BailoutType bailout_type,
224                     Register src1 = zero_reg,
225                     const Operand& src2 = Operand(zero_reg));
226   void DeoptimizeIf(
227       Condition condition, LInstruction* instr,
228       Deoptimizer::DeoptReason deopt_reason = Deoptimizer::kNoReason,
229       Register src1 = zero_reg, const Operand& src2 = Operand(zero_reg));
230
231   void AddToTranslation(LEnvironment* environment,
232                         Translation* translation,
233                         LOperand* op,
234                         bool is_tagged,
235                         bool is_uint32,
236                         int* object_index_pointer,
237                         int* dematerialized_index_pointer);
238   void PopulateDeoptimizationData(Handle<Code> code);
239
240   void PopulateDeoptimizationLiteralsWithInlinedFunctions();
241
242   Register ToRegister(int index) const;
243   DoubleRegister ToDoubleRegister(int index) const;
244
245   MemOperand BuildSeqStringOperand(Register string,
246                                    LOperand* index,
247                                    String::Encoding encoding);
248
249   void EmitIntegerMathAbs(LMathAbs* instr);
250
251   // Support for recording safepoint and position information.
252   void RecordSafepoint(LPointerMap* pointers,
253                        Safepoint::Kind kind,
254                        int arguments,
255                        Safepoint::DeoptMode mode);
256   void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
257   void RecordSafepoint(Safepoint::DeoptMode mode);
258   void RecordSafepointWithRegisters(LPointerMap* pointers,
259                                     int arguments,
260                                     Safepoint::DeoptMode mode);
261
262   void RecordAndWritePosition(int position) override;
263
264   static Condition TokenToCondition(Token::Value op, bool is_unsigned);
265   void EmitGoto(int block);
266
267   // EmitBranch expects to be the last instruction of a block.
268   template<class InstrType>
269   void EmitBranch(InstrType instr,
270                   Condition condition,
271                   Register src1,
272                   const Operand& src2);
273   template<class InstrType>
274   void EmitBranchF(InstrType instr,
275                    Condition condition,
276                    FPURegister src1,
277                    FPURegister src2);
278   template <class InstrType>
279   void EmitTrueBranch(InstrType instr, Condition condition, Register src1,
280                       const Operand& src2);
281   template <class InstrType>
282   void EmitFalseBranch(InstrType instr, Condition condition, Register src1,
283                        const Operand& src2);
284   template<class InstrType>
285   void EmitFalseBranchF(InstrType instr,
286                         Condition condition,
287                         FPURegister src1,
288                         FPURegister src2);
289   void EmitCmpI(LOperand* left, LOperand* right);
290   void EmitNumberUntagD(LNumberUntagD* instr, Register input,
291                         DoubleRegister result, NumberUntagDMode mode);
292
293   // Emits optimized code for typeof x == "y".  Modifies input register.
294   // Returns the condition on which a final split to
295   // true and false label should be made, to optimize fallthrough.
296   // Returns two registers in cmp1 and cmp2 that can be used in the
297   // Branch instruction after EmitTypeofIs.
298   Condition EmitTypeofIs(Label* true_label,
299                          Label* false_label,
300                          Register input,
301                          Handle<String> type_name,
302                          Register* cmp1,
303                          Operand* cmp2);
304
305   // Emits optimized code for %_IsObject(x).  Preserves input register.
306   // Returns the condition on which a final split to
307   // true and false label should be made, to optimize fallthrough.
308   Condition EmitIsObject(Register input,
309                          Register temp1,
310                          Register temp2,
311                          Label* is_not_object,
312                          Label* is_object);
313
314   // Emits optimized code for %_IsString(x).  Preserves input register.
315   // Returns the condition on which a final split to
316   // true and false label should be made, to optimize fallthrough.
317   Condition EmitIsString(Register input,
318                          Register temp1,
319                          Label* is_not_string,
320                          SmiCheck check_needed);
321
322   // Emits optimized code for %_IsConstructCall().
323   // Caller should branch on equal condition.
324   void EmitIsConstructCall(Register temp1, Register temp2);
325
326   // Emits optimized code to deep-copy the contents of statically known
327   // object graphs (e.g. object literal boilerplate).
328   void EmitDeepCopy(Handle<JSObject> object,
329                     Register result,
330                     Register source,
331                     int* offset,
332                     AllocationSiteMode mode);
333   // Emit optimized code for integer division.
334   // Inputs are signed.
335   // All registers are clobbered.
336   // If 'remainder' is no_reg, it is not computed.
337   void EmitSignedIntegerDivisionByConstant(Register result,
338                                            Register dividend,
339                                            int32_t divisor,
340                                            Register remainder,
341                                            Register scratch,
342                                            LEnvironment* environment);
343
344
345   void EnsureSpaceForLazyDeopt(int space_needed) override;
346   void DoLoadKeyedExternalArray(LLoadKeyed* instr);
347   void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
348   void DoLoadKeyedFixedArray(LLoadKeyed* instr);
349   void DoStoreKeyedExternalArray(LStoreKeyed* instr);
350   void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
351   void DoStoreKeyedFixedArray(LStoreKeyed* instr);
352
353   template <class T>
354   void EmitVectorLoadICRegisters(T* instr);
355   template <class T>
356   void EmitVectorStoreICRegisters(T* instr);
357
358   ZoneList<LEnvironment*> deoptimizations_;
359   ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
360   int inlined_function_count_;
361   Scope* const scope_;
362   TranslationBuffer translations_;
363   ZoneList<LDeferredCode*> deferred_;
364   int osr_pc_offset_;
365   bool frame_is_built_;
366
367   // Builder that keeps track of safepoints in the code. The table
368   // itself is emitted at the end of the generated code.
369   SafepointTableBuilder safepoints_;
370
371   // Compiler from a set of parallel moves to a sequential list of moves.
372   LGapResolver resolver_;
373
374   Safepoint::Kind expected_safepoint_kind_;
375
376   class PushSafepointRegistersScope final BASE_EMBEDDED {
377    public:
378     explicit PushSafepointRegistersScope(LCodeGen* codegen)
379         : codegen_(codegen) {
380       DCHECK(codegen_->info()->is_calling());
381       DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
382       codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
383
384       StoreRegistersStateStub stub(codegen_->isolate());
385       codegen_->masm_->push(ra);
386       codegen_->masm_->CallStub(&stub);
387     }
388
389     ~PushSafepointRegistersScope() {
390       DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
391       RestoreRegistersStateStub stub(codegen_->isolate());
392       codegen_->masm_->push(ra);
393       codegen_->masm_->CallStub(&stub);
394       codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
395     }
396
397    private:
398     LCodeGen* codegen_;
399   };
400
401   friend class LDeferredCode;
402   friend class LEnvironment;
403   friend class SafepointGenerator;
404   DISALLOW_COPY_AND_ASSIGN(LCodeGen);
405 };
406
407
408 class LDeferredCode : public ZoneObject {
409  public:
410   explicit LDeferredCode(LCodeGen* codegen)
411       : codegen_(codegen),
412         external_exit_(NULL),
413         instruction_index_(codegen->current_instruction_) {
414     codegen->AddDeferredCode(this);
415   }
416
417   virtual ~LDeferredCode() {}
418   virtual void Generate() = 0;
419   virtual LInstruction* instr() = 0;
420
421   void SetExit(Label* exit) { external_exit_ = exit; }
422   Label* entry() { return &entry_; }
423   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
424   int instruction_index() const { return instruction_index_; }
425
426  protected:
427   LCodeGen* codegen() const { return codegen_; }
428   MacroAssembler* masm() const { return codegen_->masm(); }
429
430  private:
431   LCodeGen* codegen_;
432   Label entry_;
433   Label exit_;
434   Label* external_exit_;
435   int instruction_index_;
436 };
437
438 } }  // namespace v8::internal
439
440 #endif  // V8_MIPS_LITHIUM_CODEGEN_MIPS_H_