Upstream version 9.38.207.0
[platform/framework/web/crosswalk.git] / src / v8 / src / ia32 / lithium-codegen-ia32.h
1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
6 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_
7
8 #include "src/ia32/lithium-ia32.h"
9
10 #include "src/base/logging.h"
11 #include "src/deoptimizer.h"
12 #include "src/ia32/lithium-gap-resolver-ia32.h"
13 #include "src/lithium-codegen.h"
14 #include "src/safepoint-table.h"
15 #include "src/scopes.h"
16 #include "src/utils.h"
17
18 namespace v8 {
19 namespace internal {
20
21 // Forward declarations.
22 class LDeferredCode;
23 class LGapNode;
24 class SafepointGenerator;
25
// Generates ia32 native code for a graph of Lithium instructions (an
// LChunk).  Besides the instruction bodies it emits the supporting
// metadata the runtime needs: the safepoint table, the deoptimization
// translation/literal tables, and any deferred code sections.
class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        dynamic_frame_alignment_(false),
        support_aligned_spilled_doubles_(false),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  // Maps a block id to the id of the block that will actually be
  // emitted for it (the chunk may redirect branch destinations).
  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  // True if control falls through from the current block to block_id,
  // i.e. no explicit jump needs to be emitted.
  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  // True if a stack frame must be built in the prologue.  Note that any
  // non-stub (regular JS) compilation always gets an eager frame.
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  // True if the frame is only built lazily, inside deferred code that
  // makes a call.
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Operand ToOperand(LOperand* op) const;
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  XMMRegister ToFloat32x4Register(LOperand* op) const;
  XMMRegister ToFloat64x2Register(LOperand* op) const;
  XMMRegister ToInt32x4Register(LOperand* op) const;
  XMMRegister ToSIMD128Register(LOperand* op) const;

  bool IsInteger32(LConstantOperand* op) const;
  bool IsSmi(LConstantOperand* op) const;
  // op must be a constant operand; r selects how the constant value is
  // encoded in the immediate (e.g. smi-tagged vs raw int32).
  Immediate ToImmediate(LOperand* op, const Representation& r) const {
    return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
  }
  double ToDouble(LConstantOperand* op) const;

  Handle<Object> ToHandle(LConstantOperand* op) const;

  // The operand denoting the second word (the one with a higher address) of
  // a double stack slot.
  Operand HighOperand(LOperand* op);

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.  These are the slow paths emitted out of
  // line and reached from the corresponding instruction's fast path.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredFloat32x4ToTagged(LInstruction* instr);
  void DoDeferredInt32x4ToTagged(LInstruction* instr);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);
  void DoDeferredSIMD128ToTagged(LInstruction* instr, Runtime::FunctionId id);

  // Shared helpers for the SIMD128 tagging/untagging paths; T selects
  // the concrete lithium instruction type being handled.
  template<class T>
  void HandleTaggedToSIMD128(LTaggedToSIMD128* instr);
  template<class T>
  void HandleSIMD128ToTagged(LSIMD128ToTagged* instr);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  void EnsureRelocSpaceForDeoptimization();

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  Scope* scope() const { return scope_; }

  // xmm0 is reserved as the double scratch register.
  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register temporary2);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  // Save/restore the caller-saved double registers around code that
  // may clobber them.
  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  // How the safepoint for a call site should be recorded.
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
  };

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode);

  void CallRuntime(const Runtime::Function* fun,
                   int argc,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  // Convenience overload: resolves the function id, then delegates to
  // the overload above (with kDontSaveFPRegs).
  void CallRuntime(Runtime::FunctionId id,
                   int argc,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, argc, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  // Whether edi already holds the call target when emitting a call to
  // a known function.
  enum EDIState {
    EDI_UNINITIALIZED,
    EDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in edi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         EDIState edi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode);

  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  // Emit a conditional branch to a deoptimization bailout taken when
  // condition cc holds.
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);

  // Stress-testing hook controlled by --deopt-every-n-times; never
  // applied to stubs.
  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  // Register lookup by allocation index.
  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  XMMRegister ToFloat32x4Register(int index) const;
  XMMRegister ToFloat64x2Register(int index) const;
  XMMRegister ToInt32x4Register(int index) const;
  XMMRegister ToSIMD128Register(int index) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;

  // Builds the memory operand for an element access into a fast array.
  Operand BuildFastArrayOperand(LOperand* elements_pointer,
                                LOperand* key,
                                Representation key_representation,
                                ElementsKind elements_kind,
                                uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);

  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      Register temp,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Register temp1,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  // Specialized keyed load/store emission, dispatched on elements kind.
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void HandleExternalArrayOpRequiresTemp(LOperand* key,
                                         Representation key_representation,
                                         ElementsKind elements_kind);
  template<class T>
  void DoLoadKeyedSIMD128ExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  template<class T>
  void DoStoreKeyedSIMD128ExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  friend class LGapResolver;

#ifdef _MSC_VER
  // On windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // esp + offset - page_size .. esp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  // Environments registered for deoptimization while emitting code.
  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  // Deferred code sections collected via AddDeferredCode(), emitted by
  // GenerateDeferredCode().
  ZoneList<LDeferredCode*> deferred_;
  bool dynamic_frame_alignment_;
  bool support_aligned_spilled_doubles_;
  int osr_pc_offset_;  // -1 until GenerateOsrPrologue() sets it.
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiler from a set of parallel moves to a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  // RAII scope that pushes the safepoint registers on entry and pops
  // them on exit, toggling expected_safepoint_kind_ accordingly.
  class PushSafepointRegistersScope V8_FINAL  BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
      DCHECK(codegen_->info()->is_calling());
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};
407
408
409 class LDeferredCode : public ZoneObject {
410  public:
411   explicit LDeferredCode(LCodeGen* codegen)
412       : codegen_(codegen),
413         external_exit_(NULL),
414         instruction_index_(codegen->current_instruction_) {
415     codegen->AddDeferredCode(this);
416   }
417
418   virtual ~LDeferredCode() {}
419   virtual void Generate() = 0;
420   virtual LInstruction* instr() = 0;
421
422   void SetExit(Label* exit) { external_exit_ = exit; }
423   Label* entry() { return &entry_; }
424   Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
425   Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
426   int instruction_index() const { return instruction_index_; }
427
428  protected:
429   LCodeGen* codegen() const { return codegen_; }
430   MacroAssembler* masm() const { return codegen_->masm(); }
431
432  private:
433   LCodeGen* codegen_;
434   Label entry_;
435   Label exit_;
436   Label* external_exit_;
437   Label done_;
438   int instruction_index_;
439 };
440
441 } }  // namespace v8::internal
442
443 #endif  // V8_IA32_LITHIUM_CODEGEN_IA32_H_