// Exported from git web view — commit 64a6b3c40cebcafc2c47dabeba86bd4b124dbce6
// [platform/framework/web/crosswalk.git] / src / v8 / src / ia32 / lithium-codegen-ia32.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
6 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_
7
8 #include "src/ia32/lithium-ia32.h"
9
10 #include "src/checks.h"
11 #include "src/deoptimizer.h"
12 #include "src/ia32/lithium-gap-resolver-ia32.h"
13 #include "src/lithium-codegen.h"
14 #include "src/safepoint-table.h"
15 #include "src/scopes.h"
16 #include "src/utils.h"
17
18 namespace v8 {
19 namespace internal {
20
21 // Forward declarations.
22 class LDeferredCode;
23 class LGapNode;
24 class SafepointGenerator;
25
// LCodeGen translates a Lithium chunk (the register-allocated low-level IR
// produced by the Crankshaft optimizing compiler) into ia32 machine code.
// Besides emitting code it records the side tables optimized code needs at
// runtime: safepoints, deoptimization bailout data, and frame translations.
class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Maps a block id to the id of the block that will actually be emitted
  // for it (empty blocks are forwarded to their destination).
  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  // True if the (resolved) destination of |block_id| immediately follows the
  // current block, so an unconditional jump to it can be elided.
  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  // True if a stack frame must be set up in the prologue.  Non-stub code
  // always gets a frame; stubs only need one if they spill, call in a
  // non-deferred path, or explicitly require it.
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  // True if a frame is only needed on deferred (slow) paths and can be
  // built lazily there instead of in the prologue.
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.  These Do* methods implement the out-of-line slow
  // paths for instructions whose fast path is emitted inline.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  // xmm0 is reserved as a scratch double register by this code generator.
  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  // Convenience overload that resolves a runtime function id before calling.
  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  // Tells CallKnownFunction whether edi already holds the callee, so the
  // load can be skipped.
  enum EDIState {
    EDI_UNINITIALIZED,
    EDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         EDIState edi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  // Emit a conditional jump to a deoptimization bailout when |cc| holds.
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);

  // True when the --deopt-every-n-times stress flag applies to this code
  // (never applied to stubs).
  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      Register temp,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  friend class LGapResolver;

#ifdef _MSC_VER
  // On windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  // Deopt bookkeeping accumulated while emitting code; flushed into the
  // Code object's deoptimization data by PopulateDeoptimizationData().
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;       // -1 until the OSR entry point has been emitted.
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // RAII scope that pushes the safepoint registers on entry and pops them
  // on exit, flipping expected_safepoint_kind_ accordingly so mismatched
  // safepoint recording is caught by the ASSERTs.
  class PushSafepointRegistersScope V8_FINAL  BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      ASSERT(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};
384
385
// Base class for out-of-line (deferred) code sections.  Each concrete
// subclass captures the operands of one instruction's slow path; the
// constructor registers the object with the code generator, which emits all
// deferred sections after the main body via GenerateDeferredCode().
class LDeferredCode : public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  // Emits the deferred code itself; implemented by each subclass.
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  // Overrides the default exit label with one owned by the caller.
  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  // The external exit takes precedence over the internally owned one.
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  // When a deferred frame is in use, done() is a separate label so the
  // frame can be torn down before jumping back; otherwise it is exit().
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label* external_exit_;
  Label done_;
  int instruction_index_;
};
417
418 } }  // namespace v8::internal
419
420 #endif  // V8_IA32_LITHIUM_CODEGEN_IA32_H_