// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
#define V8_X64_LITHIUM_CODEGEN_X64_H_

#include "src/x64/lithium-x64.h"

#include "src/base/logging.h"
#include "src/deoptimizer.h"
#include "src/lithium-codegen.h"
#include "src/safepoint-table.h"
#include "src/scopes.h"
#include "src/utils.h"
#include "src/x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

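  // An eager frame is built in the prologue whenever the code uses spill
  // slots, makes non-deferred calls, is full (non-stub) compiled code, or
  // explicitly requires a frame. Stub code whose only calls happen in
  // deferred code can instead build the frame lazily around those calls
  // (see NeedsDeferredFrame below).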
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsDehoistedKeyConstant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);

  enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
  void DoDeferredNumberTagIU(LInstruction* instr,
                             LOperand* value,
                             LOperand* temp1,
                             LOperand* temp2,
                             IntegerSignedness signedness);

  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO
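  // LITHIUM_CONCRETE_INSTRUCTION_LIST applies DECLARE_DO to every concrete
  // lithium instruction, so the macro expands into one declaration per
  // instruction type, e.g.
  //   void DoAddI(LAddI* node);
  //   void DoBranch(LBranch* node);
  // The definitions live in lithium-codegen-x64.cc.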

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };
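  // Selects how a call site records its safepoint: a simple safepoint, or
  // one that also records the pushed register state (used for calls made
  // inside a PushSafepointRegistersScope).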

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum RDIState {
    RDI_UNINITIALIZED,
    RDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         RDIState rdi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);

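  // FLAG_deopt_every_n_times is a stress-testing flag that forces a
  // deoptimization every n times a deopt point is passed; stub code is
  // exempt from this stress mode.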
  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      Representation key_representation,
      ElementsKind elements_kind,
      uint32_t base_offset);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);
#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in the range
  // rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      DCHECK(codegen_->info()->is_calling());
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };
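  // A minimal usage sketch (the real call sites are in
  // lithium-codegen-x64.cc): the scope pushes the safepoint registers
  // around a call emitted without a full frame, and the destructor pops
  // them and restores the kSimple expectation.
  //
  //   PushSafepointRegistersScope scope(this);
  //   // ... emit a call recorded with RecordSafepointWithRegisters ...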

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};
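
// A deferred-code stub is declared as a concrete subclass that captures its
// instruction and forwards Generate() to the matching DoDeferred* method.
// A minimal sketch of the pattern used throughout lithium-codegen-x64.cc:
//
//   class DeferredNumberTagD V8_FINAL : public LDeferredCode {
//    public:
//     DeferredNumberTagD(LCodeGen* codegen, LNumberTagD* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       codegen()->DoDeferredNumberTagD(instr_);
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LNumberTagD* instr_;
//   };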

} }  // namespace v8::internal

#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_