1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_X87_LITHIUM_CODEGEN_X87_H_
6 #define V8_X87_LITHIUM_CODEGEN_X87_H_
9 #include "src/x87/lithium-x87.h"
11 #include "src/base/logging.h"
12 #include "src/deoptimizer.h"
13 #include "src/lithium-codegen.h"
14 #include "src/safepoint-table.h"
15 #include "src/scopes.h"
16 #include "src/utils.h"
17 #include "src/x87/lithium-gap-resolver-x87.h"
22 // Forward declarations.
25 class SafepointGenerator;
// LCodeGen translates a Lithium chunk into x87 (ia32-without-SSE2) machine
// code. It owns the deoptimization bookkeeping (translations, jump table,
// literals), the safepoint table builder, and a model of the x87 FPU
// register stack so virtual double registers can be mapped onto the
// physical 8-slot stack between instructions.
// NOTE(review): this chunk is an elided view of the header -- access
// specifiers and closing braces between the visible lines are not shown.
27 class LCodeGen: public LCodeGenBase {
// Constructor: wires zone-allocated bookkeeping into the compilation
// zone and pre-registers inlined function literals for deoptimization.
29 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
30 : LCodeGenBase(chunk, assembler, info),
31 deoptimizations_(4, info->zone()),
32 jump_table_(4, info->zone()),
33 inlined_function_count_(0),
34 scope_(info->scope()),
35 translations_(info->zone()),
36 deferred_(8, info->zone()),
37 dynamic_frame_alignment_(false),
38 support_aligned_spilled_doubles_(false),
40 frame_is_built_(false),
41 x87_stack_(assembler),
42 safepoints_(info->zone()),
44 expected_safepoint_kind_(Safepoint::kSimple) {
45 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Maps a block id to the id of its emission destination (delegates to the
// chunk, which resolves chains of empty blocks).
48 int LookupDestination(int block_id) const {
49 return chunk()->LookupDestination(block_id);
// True when the given block's destination is the next block to be
// emitted, i.e. a jump to it can be elided.
52 bool IsNextEmittedBlock(int block_id) const {
53 return LookupDestination(block_id) == GetNextEmittedBlock();
// A frame is built eagerly when there are spill slots, non-deferred
// calls, or the compilation explicitly requires one.
56 bool NeedsEagerFrame() const {
57 return GetStackSlotCount() > 0 ||
58 info()->is_non_deferred_calling() ||
60 info()->requires_frame();
// Deferred-only frames are built lazily inside deferred code paths.
62 bool NeedsDeferredFrame() const {
63 return !NeedsEagerFrame() && info()->is_deferred_calling();
66 // Support for converting LOperands to assembler types.
67 Operand ToOperand(LOperand* op) const;
68 Register ToRegister(LOperand* op) const;
69 X87Register ToX87Register(LOperand* op) const;
71 bool IsInteger32(LConstantOperand* op) const;
72 bool IsSmi(LConstantOperand* op) const;
73 Immediate ToImmediate(LOperand* op, const Representation& r) const {
74 return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
76 double ToDouble(LConstantOperand* op) const;
78 // Support for non-sse2 (x87) floating point stack handling.
79 // These functions maintain the mapping of physical stack registers to our
80 // virtual registers between instructions.
81 enum X87OperandType { kX87DoubleOperand, kX87FloatOperand, kX87IntOperand };
// X87Mov overloads: load into a virtual x87 register from memory, store a
// virtual x87 register to memory, or move between virtual x87 registers.
// The operand type selects the x87 load/store width.
83 void X87Mov(X87Register reg, Operand src,
84 X87OperandType operand = kX87DoubleOperand);
85 void X87Mov(Operand src, X87Register reg,
86 X87OperandType operand = kX87DoubleOperand);
87 void X87Mov(X87Register reg, X87Register src,
88 X87OperandType operand = kX87DoubleOperand);
90 void X87PrepareBinaryOp(
91 X87Register left, X87Register right, X87Register result);
93 void X87LoadForUsage(X87Register reg);
94 void X87LoadForUsage(X87Register reg1, X87Register reg2);
// Thin forwarders into the x87 stack model (see X87Stack below).
95 void X87PrepareToWrite(X87Register reg) { x87_stack_.PrepareToWrite(reg); }
96 void X87CommitWrite(X87Register reg) { x87_stack_.CommitWrite(reg); }
98 void X87Fxch(X87Register reg, int other_slot = 0) {
99 x87_stack_.Fxch(reg, other_slot);
101 void X87Free(X87Register reg) {
102 x87_stack_.Free(reg);
106 bool X87StackEmpty() {
107 return x87_stack_.depth() == 0;
110 Handle<Object> ToHandle(LConstantOperand* op) const;
112 // The operand denoting the second word (the one with a higher address) of
113 // a double stack slot.
114 Operand HighOperand(LOperand* op);
116 // Try to generate code for the entire chunk, but it may fail if the
117 // chunk contains constructs we cannot handle. Returns true if the
118 // code generation attempt succeeded.
121 // Finish the code by setting stack height, safepoint, and bailout
122 // information on it.
123 void FinishCode(Handle<Code> code);
125 // Deferred code support.
126 void DoDeferredNumberTagD(LNumberTagD* instr);
128 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
129 void DoDeferredNumberTagIU(LInstruction* instr,
132 IntegerSignedness signedness);
134 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
135 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
136 void DoDeferredStackCheck(LStackCheck* instr);
137 void DoDeferredMaybeGrowElements(LMaybeGrowElements* instr);
138 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
139 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
140 void DoDeferredAllocate(LAllocate* instr);
141 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
142 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
146 // Parallel move support.
147 void DoParallelMove(LParallelMove* move);
148 void DoGap(LGap* instr);
150 // Emit frame translation commands for an environment.
151 void WriteTranslation(LEnvironment* environment, Translation* translation);
153 void EnsureRelocSpaceForDeoptimization();
155 // Declare methods that deal with the individual node types.
156 #define DECLARE_DO(type) void Do##type(L##type* node);
157 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
161 LanguageMode language_mode() const { return info()->language_mode(); }
163 Scope* scope() const { return scope_; }
165 void EmitClassOfTest(Label* if_true,
167 Handle<String> class_name,
170 Register temporary2);
172 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
174 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
176 // Code generation passes. Returns true if code generation should
178 void GenerateBodyInstructionPre(LInstruction* instr) override;
179 void GenerateBodyInstructionPost(LInstruction* instr) override;
180 bool GeneratePrologue();
181 bool GenerateDeferredCode();
182 bool GenerateJumpTable();
183 bool GenerateSafepointTable();
185 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
186 void GenerateOsrPrologue();
// Enumerators of the SafepointMode used when calling code/runtime.
189 RECORD_SIMPLE_SAFEPOINT,
190 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
193 void CallCode(Handle<Code> code,
194 RelocInfo::Mode mode,
195 LInstruction* instr);
197 void CallCodeGeneric(Handle<Code> code,
198 RelocInfo::Mode mode,
200 SafepointMode safepoint_mode);
202 void CallRuntime(const Runtime::Function* fun, int argc, LInstruction* instr,
203 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
// Convenience overload: resolves the runtime function by id, then calls
// the pointer-based overload above.
205 void CallRuntime(Runtime::FunctionId id,
207 LInstruction* instr) {
208 const Runtime::Function* function = Runtime::FunctionForId(id);
209 CallRuntime(function, argc, instr);
212 void CallRuntimeFromDeferred(Runtime::FunctionId id,
217 void LoadContextFromDeferred(LOperand* context);
219 // Generate a direct call to a known function. Expects the function
221 void CallKnownFunction(Handle<JSFunction> function,
222 int formal_parameter_count, int arity,
223 LInstruction* instr);
225 void RecordSafepointWithLazyDeopt(LInstruction* instr,
226 SafepointMode safepoint_mode);
228 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
229 Safepoint::DeoptMode mode);
// Emit a conditional deopt: if condition cc holds, bail out of optimized
// code with the given reason (and, in the first overload, bailout type).
230 void DeoptimizeIf(Condition cc, LInstruction* instr,
231 Deoptimizer::DeoptReason deopt_reason,
232 Deoptimizer::BailoutType bailout_type);
233 void DeoptimizeIf(Condition cc, LInstruction* instr,
234 Deoptimizer::DeoptReason deopt_reason);
// Stress-testing hook: forced periodic deopts apply only to functions,
// never to stubs.
236 bool DeoptEveryNTimes() {
237 return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
240 void AddToTranslation(LEnvironment* environment,
241 Translation* translation,
245 int* object_index_pointer,
246 int* dematerialized_index_pointer);
247 void PopulateDeoptimizationData(Handle<Code> code);
249 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
251 Register ToRegister(int index) const;
252 X87Register ToX87Register(int index) const;
253 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
254 int32_t ToInteger32(LConstantOperand* op) const;
255 ExternalReference ToExternalReference(LConstantOperand* op) const;
257 Operand BuildFastArrayOperand(LOperand* elements_pointer,
259 Representation key_representation,
260 ElementsKind elements_kind,
261 uint32_t base_offset);
263 Operand BuildSeqStringOperand(Register string,
265 String::Encoding encoding);
267 void EmitIntegerMathAbs(LMathAbs* instr);
269 // Support for recording safepoint and position information.
270 void RecordSafepoint(LPointerMap* pointers,
271 Safepoint::Kind kind,
273 Safepoint::DeoptMode mode);
274 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
275 void RecordSafepoint(Safepoint::DeoptMode mode);
276 void RecordSafepointWithRegisters(LPointerMap* pointers,
278 Safepoint::DeoptMode mode);
280 void RecordAndWritePosition(int position) override;
282 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
283 void EmitGoto(int block);
285 // EmitBranch expects to be the last instruction of a block.
286 template<class InstrType>
287 void EmitBranch(InstrType instr, Condition cc);
288 template <class InstrType>
289 void EmitTrueBranch(InstrType instr, Condition cc);
290 template <class InstrType>
291 void EmitFalseBranch(InstrType instr, Condition cc);
292 void EmitNumberUntagDNoSSE2(LNumberUntagD* instr, Register input,
293 Register temp, X87Register res_reg,
294 NumberUntagDMode mode);
296 // Emits optimized code for typeof x == "y". Modifies input register.
297 // Returns the condition on which a final split to
298 // true and false label should be made, to optimize fallthrough.
299 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
301 // Emits optimized code for %_IsString(x). Preserves input register.
302 // Returns the condition on which a final split to
303 // true and false label should be made, to optimize fallthrough.
304 Condition EmitIsString(Register input,
306 Label* is_not_string,
307 SmiCheck check_needed);
309 // Emits optimized code for %_IsConstructCall().
310 // Caller should branch on equal condition.
311 void EmitIsConstructCall(Register temp);
313 // Emits optimized code to deep-copy the contents of statically known
314 // object graphs (e.g. object literal boilerplate).
315 void EmitDeepCopy(Handle<JSObject> object,
319 AllocationSiteMode mode);
321 void EnsureSpaceForLazyDeopt(int space_needed) override;
322 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
323 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
324 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
325 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
326 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
327 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// Template helpers for setting up load/store IC registers from a vector
// slot (templated over the instruction type).
330 void EmitVectorLoadICRegisters(T* instr);
332 void EmitVectorStoreICRegisters(T* instr);
334 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);
336 // Emits code for pushing either a tagged constant, a (non-double)
337 // register, or a stack slot operand.
338 void EmitPushTaggedOperand(LOperand* operand);
340 void X87Fld(Operand src, X87OperandType opts);
342 void EmitFlushX87ForDeopt();
343 void FlushX87StackIfNecessary(LInstruction* instr) {
344 x87_stack_.FlushIfNecessary(instr, this);
346 friend class LGapResolver;
349 // On windows, you may not access the stack more than one page below
350 // the most recently mapped page. To make the allocated area randomly
351 // accessible, we write an arbitrary value to each page in range
352 // esp + offset - page_size .. esp in turn.
353 void MakeSureStackPagesMapped(int offset);
// --- Member state -------------------------------------------------------
356 ZoneList<LEnvironment*> deoptimizations_;
357 ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
358 int inlined_function_count_;
360 TranslationBuffer translations_;
361 ZoneList<LDeferredCode*> deferred_;
362 bool dynamic_frame_alignment_;
363 bool support_aligned_spilled_doubles_;
365 bool frame_is_built_;
// X87Stack models the physical 8-slot x87 FPU register stack: which
// virtual X87Register lives in which physical slot, and at what depth.
// Copies are immutable snapshots used to restore the stack layout when
// control flow merges (e.g. at deferred-code entry points).
367 class X87Stack : public ZoneObject {
369 explicit X87Stack(MacroAssembler* masm)
370 : stack_depth_(0), is_mutable_(true), masm_(masm) { }
// Copy constructor: snapshot of another stack, marked immutable.
// NOTE(review): masm_(masm()) calls the accessor (which returns masm_)
// while masm_ is still uninitialized -- members initialize in declaration
// order, so this reads an indeterminate value. Presumably this was meant
// to be masm_(other.masm_); confirm against upstream before changing.
371 explicit X87Stack(const X87Stack& other)
372 : stack_depth_(other.stack_depth_), is_mutable_(false), masm_(masm()) {
373 for (int i = 0; i < stack_depth_; i++) {
374 stack_[i] = other.stack_[i];
// Two stacks are equal iff they have the same depth and the same
// register in every occupied slot.
377 bool operator==(const X87Stack& other) const {
378 if (stack_depth_ != other.stack_depth_) return false;
379 for (int i = 0; i < stack_depth_; i++) {
380 if (!stack_[i].is(other.stack_[i])) return false;
// Assignment copies depth and slot contents (masm_/is_mutable_ keep
// their current values on the destination).
384 X87Stack& operator=(const X87Stack& other) {
385 stack_depth_ = other.stack_depth_;
386 for (int i = 0; i < stack_depth_; i++) {
387 stack_[i] = other.stack_[i];
391 bool Contains(X87Register reg);
392 void Fxch(X87Register reg, int other_slot = 0);
393 void Free(X87Register reg);
394 void PrepareToWrite(X87Register reg);
395 void CommitWrite(X87Register reg);
396 void FlushIfNecessary(LInstruction* instr, LCodeGen* cgen);
397 void LeavingBlock(int current_block_id, LGoto* goto_instr, LCodeGen* cgen);
398 int depth() const { return stack_depth_; }
// Physical st(i) slot currently holding the given virtual register.
400 int st(X87Register reg) { return st2idx(ArrayIndex(reg)); }
// Pushes a virtual register onto the modeled stack; DCHECKs that the
// 8-slot physical stack is not exceeded.
406 void push(X87Register reg) {
408 DCHECK(stack_depth_ < X87Register::kMaxNumAllocatableRegisters);
409 stack_[stack_depth_] = reg;
413 MacroAssembler* masm() const { return masm_; }
414 Isolate* isolate() const { return masm_->isolate(); }
417 int ArrayIndex(X87Register reg);
420 X87Register stack_[X87Register::kMaxNumAllocatableRegisters];
423 MacroAssembler* masm_;
426 // block_id -> X87Stack*;
427 typedef std::map<int, X87Stack*> X87StackMap;
428 X87StackMap x87_stack_map_;
430 // Builder that keeps track of safepoints in the code. The table
431 // itself is emitted at the end of the generated code.
432 SafepointTableBuilder safepoints_;
434 // Compiles a set of parallel moves into a sequential list of moves.
435 LGapResolver resolver_;
437 Safepoint::Kind expected_safepoint_kind_;
// RAII scope that pushes the safepoint registers on entry and pops them
// on exit, flipping expected_safepoint_kind_ between kSimple and
// kWithRegisters (DCHECK-verified in both directions).
439 class PushSafepointRegistersScope final BASE_EMBEDDED {
441 explicit PushSafepointRegistersScope(LCodeGen* codegen)
442 : codegen_(codegen) {
443 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
444 codegen_->masm_->PushSafepointRegisters();
445 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
446 DCHECK(codegen_->info()->is_calling());
449 ~PushSafepointRegistersScope() {
450 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
451 codegen_->masm_->PopSafepointRegisters();
452 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
459 friend class LDeferredCode;
460 friend class LEnvironment;
461 friend class SafepointGenerator;
462 friend class X87Stack;
463 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
// Base class for out-of-line (deferred) code stubs emitted after the main
// instruction stream. A subclass implements Generate(); construction
// registers the stub with the code generator and snapshots the x87 stack
// layout so it can be restored at the deferred entry point.
467 class LDeferredCode : public ZoneObject {
// Records the current instruction index and x87 stack snapshot, then
// self-registers via codegen->AddDeferredCode(this).
469 explicit LDeferredCode(LCodeGen* codegen, const LCodeGen::X87Stack& x87_stack)
471 external_exit_(NULL),
472 instruction_index_(codegen->current_instruction_),
473 x87_stack_(x87_stack) {
474 codegen->AddDeferredCode(this);
477 virtual ~LDeferredCode() {}
// Emits the deferred code body; implemented by each concrete stub.
478 virtual void Generate() = 0;
// The Lithium instruction this deferred code belongs to.
479 virtual LInstruction* instr() = 0;
// Lets the creator override the default exit label (see exit() below).
481 void SetExit(Label* exit) { external_exit_ = exit; }
482 Label* entry() { return &entry_; }
// Exit target: the externally supplied label if one was set, otherwise
// the stub's own internal exit label.
483 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
// When a deferred frame is needed, done_ is a separate label so the frame
// teardown can run before jumping to exit(); otherwise done() == exit().
484 Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
485 int instruction_index() const { return instruction_index_; }
486 const LCodeGen::X87Stack& x87_stack() const { return x87_stack_; }
489 LCodeGen* codegen() const { return codegen_; }
490 MacroAssembler* masm() const { return codegen_->masm(); }
// NULL unless SetExit() was called.
496 Label* external_exit_;
498 int instruction_index_;
// By-value snapshot (immutable copy) of the x87 stack at creation time.
499 LCodeGen::X87Stack x87_stack_;
502 } } // namespace v8::internal
504 #endif // V8_X87_LITHIUM_CODEGEN_X87_H_