// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
#define V8_X64_LITHIUM_CODEGEN_X64_H_

#include "x64/lithium-x64.h"

#include "deoptimizer.h"
#include "lithium-codegen.h"
#include "safepoint-table.h"
#include "x64/lithium-gap-resolver-x64.h"

namespace v8 {
namespace internal {

// Forward declarations.
class LDeferredCode;
class SafepointGenerator;

class LCodeGen: public LCodeGenBase {
 public:
  LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
      : LCodeGenBase(chunk, assembler, info),
        deoptimizations_(4, info->zone()),
        jump_table_(4, info->zone()),
        deoptimization_literals_(8, info->zone()),
        inlined_function_count_(0),
        scope_(info->scope()),
        translations_(info->zone()),
        deferred_(8, info->zone()),
        osr_pc_offset_(-1),
        frame_is_built_(false),
        safepoints_(info->zone()),
        resolver_(this),
        expected_safepoint_kind_(Safepoint::kSimple) {
    PopulateDeoptimizationLiteralsWithInlinedFunctions();
  }

  int LookupDestination(int block_id) const {
    return chunk()->LookupDestination(block_id);
  }

  bool IsNextEmittedBlock(int block_id) const {
    return LookupDestination(block_id) == GetNextEmittedBlock();
  }

  bool NeedsEagerFrame() const {
    return GetStackSlotCount() > 0 ||
        info()->is_non_deferred_calling() ||
        !info()->IsStub() ||
        info()->requires_frame();
  }
  bool NeedsDeferredFrame() const {
    return !NeedsEagerFrame() && info()->is_deferred_calling();
  }

  // Support for converting LOperands to assembler types.
  Register ToRegister(LOperand* op) const;
  XMMRegister ToDoubleRegister(LOperand* op) const;
  XMMRegister ToFloat32x4Register(LOperand* op) const;
  XMMRegister ToFloat64x2Register(LOperand* op) const;
  XMMRegister ToInt32x4Register(LOperand* op) const;
  XMMRegister ToSIMD128Register(LOperand* op) const;
  bool IsInteger32Constant(LConstantOperand* op) const;
  bool IsDehoistedKeyConstant(LConstantOperand* op) const;
  bool IsSmiConstant(LConstantOperand* op) const;
  int32_t ToInteger32(LConstantOperand* op) const;
  Smi* ToSmi(LConstantOperand* op) const;
  double ToDouble(LConstantOperand* op) const;
  ExternalReference ToExternalReference(LConstantOperand* op) const;
  Handle<Object> ToHandle(LConstantOperand* op) const;
  Operand ToOperand(LOperand* op) const;

  // Try to generate code for the entire chunk, but it may fail if the
  // chunk contains constructs we cannot handle. Returns true if the
  // code generation attempt succeeded.
  bool GenerateCode();

  // Finish the code by setting stack height, safepoint, and bailout
  // information on it.
  void FinishCode(Handle<Code> code);

  // Deferred code support.
  void DoDeferredNumberTagD(LNumberTagD* instr);
  void DoDeferredNumberTagU(LNumberTagU* instr);
  void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
  void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
  void DoDeferredStackCheck(LStackCheck* instr);
  void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
  void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
  void DoDeferredAllocate(LAllocate* instr);
  void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
                                       Label* map_check);
  void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
  void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
                                   Register object,
                                   Register index);
  void DoDeferredSIMD128ToTagged(LSIMD128ToTagged* instr,
                                 Runtime::FunctionId id);

  void HandleTaggedToSIMD128(LTaggedToSIMD128* instr);
  void HandleSIMD128ToTagged(LSIMD128ToTagged* instr);

  // Parallel move support.
  void DoParallelMove(LParallelMove* move);
  void DoGap(LGap* instr);

  // Emit frame translation commands for an environment.
  void WriteTranslation(LEnvironment* environment, Translation* translation);

  // Declare methods that deal with the individual node types.
#define DECLARE_DO(type) void Do##type(L##type* node);
  LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
#undef DECLARE_DO

 private:
  StrictMode strict_mode() const { return info()->strict_mode(); }

  LPlatformChunk* chunk() const { return chunk_; }
  Scope* scope() const { return scope_; }
  HGraph* graph() const { return chunk()->graph(); }

  XMMRegister double_scratch0() const { return xmm0; }

  void EmitClassOfTest(Label* if_true,
                       Label* if_false,
                       Handle<String> class_name,
                       Register input,
                       Register temporary,
                       Register scratch);

  int GetStackSlotCount() const { return chunk()->spill_slot_count(); }

  void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
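
  // Support for saving and restoring caller-saved XMM registers around calls.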
  void SaveCallerDoubles();
  void RestoreCallerDoubles();

  // Code generation passes. Returns true if code generation should
  // continue.
  void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
  void GenerateBodyInstructionPost(LInstruction* instr) V8_OVERRIDE;
  bool GeneratePrologue();
  bool GenerateDeferredCode();
  bool GenerateJumpTable();
  bool GenerateSafepointTable();

  // Generates the custom OSR entrypoint and sets the osr_pc_offset.
  void GenerateOsrPrologue();

  enum SafepointMode {
    RECORD_SIMPLE_SAFEPOINT,
    RECORD_SAFEPOINT_WITH_REGISTERS
  };
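
  // Helpers for emitting calls; they record the safepoint information
  // required for lazy deoptimization after the call.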
  void CallCodeGeneric(Handle<Code> code,
                       RelocInfo::Mode mode,
                       LInstruction* instr,
                       SafepointMode safepoint_mode,
                       int argc);

  void CallCode(Handle<Code> code,
                RelocInfo::Mode mode,
                LInstruction* instr);

  void CallRuntime(const Runtime::Function* function,
                   int num_arguments,
                   LInstruction* instr,
                   SaveFPRegsMode save_doubles = kDontSaveFPRegs);

  void CallRuntime(Runtime::FunctionId id,
                   int num_arguments,
                   LInstruction* instr) {
    const Runtime::Function* function = Runtime::FunctionForId(id);
    CallRuntime(function, num_arguments, instr);
  }

  void CallRuntimeFromDeferred(Runtime::FunctionId id,
                               int argc,
                               LInstruction* instr,
                               LOperand* context);

  void LoadContextFromDeferred(LOperand* context);

  enum RDIState {
    RDI_UNINITIALIZED,
    RDI_CONTAINS_TARGET
  };

  // Generate a direct call to a known function. Expects the function
  // to be in rdi.
  void CallKnownFunction(Handle<JSFunction> function,
                         int formal_parameter_count,
                         int arity,
                         LInstruction* instr,
                         RDIState rdi_state);

  void RecordSafepointWithLazyDeopt(LInstruction* instr,
                                    SafepointMode safepoint_mode,
                                    int argc);
  void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
                                            Safepoint::DeoptMode mode);
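  // Emit a conditional jump to the deoptimization entry that corresponds to
  // the given environment.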
  void DeoptimizeIf(Condition cc,
                    LEnvironment* environment,
                    Deoptimizer::BailoutType bailout_type);
  void DeoptimizeIf(Condition cc, LEnvironment* environment);

  bool DeoptEveryNTimes() {
    return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
  }

  void AddToTranslation(LEnvironment* environment,
                        Translation* translation,
                        LOperand* op,
                        bool is_tagged,
                        bool is_uint32,
                        int* object_index_pointer,
                        int* dematerialized_index_pointer);
  void PopulateDeoptimizationData(Handle<Code> code);
  int DefineDeoptimizationLiteral(Handle<Object> literal);

  void PopulateDeoptimizationLiteralsWithInlinedFunctions();

  Register ToRegister(int index) const;
  XMMRegister ToDoubleRegister(int index) const;
  XMMRegister ToSIMD128Register(int index) const;
  Operand BuildFastArrayOperand(
      LOperand* elements_pointer,
      LOperand* key,
      ElementsKind elements_kind,
      uint32_t offset,
      uint32_t additional_index = 0);

  Operand BuildSeqStringOperand(Register string,
                                LOperand* index,
                                String::Encoding encoding);

  void EmitIntegerMathAbs(LMathAbs* instr);
  void EmitSmiMathAbs(LMathAbs* instr);

  // Support for recording safepoint and position information.
  void RecordSafepoint(LPointerMap* pointers,
                       Safepoint::Kind kind,
                       int arguments,
                       Safepoint::DeoptMode mode);
  void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
  void RecordSafepoint(Safepoint::DeoptMode mode);
  void RecordSafepointWithRegisters(LPointerMap* pointers,
                                    int arguments,
                                    Safepoint::DeoptMode mode);
  void RecordAndWritePosition(int position) V8_OVERRIDE;

  static Condition TokenToCondition(Token::Value op, bool is_unsigned);
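  // Emit an unconditional jump to the given block unless it is the next
  // block in emission order, in which case fall through.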
  void EmitGoto(int block);

  // EmitBranch expects to be the last instruction of a block.
  template<class InstrType>
  void EmitBranch(InstrType instr, Condition cc);
  template<class InstrType>
  void EmitFalseBranch(InstrType instr, Condition cc);
  void EmitNumberUntagD(
      Register input,
      XMMRegister result,
      bool allow_undefined_as_nan,
      bool deoptimize_on_minus_zero,
      LEnvironment* env,
      NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);

  // Emits optimized code for typeof x == "y". Modifies input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);

  // Emits optimized code for %_IsObject(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsObject(Register input,
                         Label* is_not_object,
                         Label* is_object);

  // Emits optimized code for %_IsString(x). Preserves input register.
  // Returns the condition on which a final split to
  // true and false label should be made, to optimize fallthrough.
  Condition EmitIsString(Register input,
                         Register temp1,
                         Label* is_not_string,
                         SmiCheck check_needed);

  // Emits optimized code for %_IsConstructCall().
  // Caller should branch on equal condition.
  void EmitIsConstructCall(Register temp);

  // Emits code for pushing either a tagged constant, a (non-double)
  // register, or a stack slot operand.
  void EmitPushTaggedOperand(LOperand* operand);

  // Emits optimized code to deep-copy the contents of statically known
  // object graphs (e.g. object literal boilerplate).
  void EmitDeepCopy(Handle<JSObject> object,
                    Register result,
                    Register source,
                    int* offset,
                    AllocationSiteMode mode);
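
  // Ensure there is enough space in the generated code to patch in a lazy
  // deoptimization call at the current position.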
  void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
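
  // Per-elements-kind helpers used by the keyed load and store instructions.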
  void DoLoadKeyedExternalArray(LLoadKeyed* instr);
  void HandleExternalArrayOpRequiresPreScale(LOperand* key,
                                             ElementsKind elements_kind);
  void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
  void DoLoadKeyedFixedArray(LLoadKeyed* instr);
  void DoStoreKeyedExternalArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
  void DoStoreKeyedFixedArray(LStoreKeyed* instr);

  // On Windows, you may not access the stack more than one page below
  // the most recently mapped page. To make the allocated area randomly
  // accessible, we write an arbitrary value to each page in the range
  // rsp + offset - page_size .. rsp in turn.
  void MakeSureStackPagesMapped(int offset);

  ZoneList<LEnvironment*> deoptimizations_;
  ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
  ZoneList<Handle<Object> > deoptimization_literals_;
  int inlined_function_count_;
  Scope* const scope_;
  TranslationBuffer translations_;
  ZoneList<LDeferredCode*> deferred_;
  int osr_pc_offset_;
  bool frame_is_built_;

  // Builder that keeps track of safepoints in the code. The table
  // itself is emitted at the end of the generated code.
  SafepointTableBuilder safepoints_;

  // Compiles a set of parallel moves into a sequential list of moves.
  LGapResolver resolver_;

  Safepoint::Kind expected_safepoint_kind_;
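
  // Scope that pushes the safepoint registers on entry and pops them on
  // exit, switching the expected safepoint kind accordingly.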
  class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
   public:
    explicit PushSafepointRegistersScope(LCodeGen* codegen)
        : codegen_(codegen) {
      ASSERT(codegen_->info()->is_calling());
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
      codegen_->masm_->PushSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
    }

    ~PushSafepointRegistersScope() {
      ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
      codegen_->masm_->PopSafepointRegisters();
      codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
    }

   private:
    LCodeGen* codegen_;
  };

  friend class LDeferredCode;
  friend class LEnvironment;
  friend class SafepointGenerator;
  DISALLOW_COPY_AND_ASSIGN(LCodeGen);
};


class LDeferredCode: public ZoneObject {
 public:
  explicit LDeferredCode(LCodeGen* codegen)
      : codegen_(codegen),
        external_exit_(NULL),
        instruction_index_(codegen->current_instruction_) {
    codegen->AddDeferredCode(this);
  }

  virtual ~LDeferredCode() {}
  virtual void Generate() = 0;
  virtual LInstruction* instr() = 0;

  void SetExit(Label* exit) { external_exit_ = exit; }
  Label* entry() { return &entry_; }
  Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
  Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
  int instruction_index() const { return instruction_index_; }

 protected:
  LCodeGen* codegen() const { return codegen_; }
  MacroAssembler* masm() const { return codegen_->masm(); }

 private:
  LCodeGen* codegen_;
  Label entry_;
  Label exit_;
  Label done_;
  Label* external_exit_;
  int instruction_index_;
};

} }  // namespace v8::internal

#endif  // V8_X64_LITHIUM_CODEGEN_X64_H_