// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
#define V8_X64_LITHIUM_CODEGEN_X64_H_

#include "x64/lithium-x64.h"

#include "checks.h"
#include "deoptimizer.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "scopes.h"
#include "v8utils.h"
#include "x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

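  // Full code (anything that is not a stub) always builds a frame in the
  // prologue; a stub needs one eagerly only if it uses stack slots or calls
  // out on a non-deferred path, and otherwise builds it lazily in deferred
  // code (see NeedsDeferredFrame below).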
  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  XMMRegister ToFloat32x4Register(LOperand* op) const;
  XMMRegister ToInt32x4Register(LOperand* op) const;
  XMMRegister ToSIMD128Register(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  bool IsTaggedConstant(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredNumberTagU(LNumberTagU* instr);
  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredSIMD128ToTagged(LSIMD128ToTagged* instr,
                                 Runtime::FunctionId id);

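  // Boxing/unboxing helpers shared by the SIMD128 value types; the template
  // parameter is assumed to select the concrete heap type (e.g. Float32x4).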
  template<class T>
  void HandleTaggedToSIMD128(LTaggedToSIMD128* instr);
  template<class T>
  void HandleSIMD128ToTagged(LSIMD128ToTagged* instr);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictModeFlag strict_mode_flag() const {
    return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
  }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

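  // xmm0 is reserved as the double scratch register for this port.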
  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void Abort(BailoutReason reason);

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }

  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes.  Returns true if code generation should
  // continue.
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

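  // Distinguishes call sites that record a plain safepoint from those that
  // also record the contents of the registers.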
  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };

  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

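  // Whether rdi is already known to hold the call target.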
  enum RDIState {
    RDI_UNINITIALIZED,
    RDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function.  Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         RDIState rdi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);
  void ApplyCheckIf(Condition cc, LBoundsCheck* check);

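  // Whether the --deopt-every-n-times stress flag is in effect; stubs are
  // exempt from it.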
  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  XMMRegister ToSIMD128Register(int index) const;
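
  // Computes the memory operand for a keyed element access from the elements
  // pointer, the key, the elements kind, and a constant offset.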
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      ElementsKind elements_kind,
      uint32_t offset,
      uint32_t additional_index = 0);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y".  Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x).  Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);

  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void HandleExternalArrayOpRequiresPreScale(LOperand* key,
                                             ElementsKind elements_kind);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);
#ifdef _MSC_VER
  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in range
  // rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);
#endif

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;

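  // RAII helper for sequences that must record a safepoint with registers:
  // saves the safepoint registers on entry, restores them on exit, and keeps
  // expected_safepoint_kind_ in sync.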
  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


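// Base class for code emitted out of line after the main instruction stream.
// Subclasses capture the operands they need, emit the slow path in
// Generate(), and report their instruction via instr(). A minimal sketch
// (DeferredExample is a hypothetical subclass, not part of this file):
//
//   class DeferredExample V8_FINAL : public LDeferredCode {
//    public:
//     DeferredExample(LCodeGen* codegen, LInstruction* instr)
//         : LDeferredCode(codegen), instr_(instr) { }
//     virtual void Generate() V8_OVERRIDE {
//       // Emit the slow path through masm() here.
//     }
//     virtual LInstruction* instr() V8_OVERRIDE { return instr_; }
//    private:
//     LInstruction* instr_;
//   };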
class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_