1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_IA32_LITHIUM_CODEGEN_IA32_H_
6 #define V8_IA32_LITHIUM_CODEGEN_IA32_H_
8 #include "src/ia32/lithium-ia32.h"
10 #include "src/base/logging.h"
11 #include "src/deoptimizer.h"
12 #include "src/ia32/lithium-gap-resolver-ia32.h"
13 #include "src/lithium-codegen.h"
14 #include "src/safepoint-table.h"
15 #include "src/scopes.h"
16 #include "src/utils.h"
21 // Forward declarations.
24 class SafepointGenerator;
// LCodeGen translates a chunk of Lithium instructions (LChunk) into IA-32
// native code via the MacroAssembler, recording the safepoint and
// deoptimization metadata the runtime needs alongside the generated code.
// One Do<Type> member is declared per concrete Lithium instruction (see the
// LITHIUM_CONCRETE_INSTRUCTION_LIST expansion below).
//
// NOTE(review): this chunk appears to be an incomplete sampling of the
// original header — the embedded per-line numbers are non-contiguous and
// access specifiers, several parameters, and closing braces are missing.
// Verify against the upstream V8 source before making code changes here.
26 class LCodeGen: public LCodeGenBase {
// Constructor: takes ownership of nothing; chunk/assembler/info are owned by
// the caller. Allocates all bookkeeping lists in the CompilationInfo's zone.
28 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
29 : LCodeGenBase(chunk, assembler, info),
30 deoptimizations_(4, info->zone()),
31 jump_table_(4, info->zone()),
32 deoptimization_literals_(8, info->zone()),
33 inlined_function_count_(0),
34 scope_(info->scope()),
35 translations_(info->zone()),
36 deferred_(8, info->zone()),
37 dynamic_frame_alignment_(false),
38 support_aligned_spilled_doubles_(false),
40 frame_is_built_(false),
41 safepoints_(info->zone()),
43 expected_safepoint_kind_(Safepoint::kSimple) {
44 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Delegates block-destination lookup to the chunk (resolves a block id to
// the id of the block that will actually be emitted for it).
47 int LookupDestination(int block_id) const {
48 return chunk()->LookupDestination(block_id);
// True if the given block's destination is the next block to be emitted,
// i.e. a jump to it can be elided as fall-through.
51 bool IsNextEmittedBlock(int block_id) const {
52 return LookupDestination(block_id) == GetNextEmittedBlock();
// A full stack frame must be built eagerly in the prologue when there are
// spill slots, non-deferred calls, or the compilation explicitly requires
// a frame.
55 bool NeedsEagerFrame() const {
56 return GetStackSlotCount() > 0 ||
57 info()->is_non_deferred_calling() ||
59 info()->requires_frame();
// A frame is built lazily (only inside deferred code) when no eager frame
// is needed but deferred code paths perform calls.
61 bool NeedsDeferredFrame() const {
62 return !NeedsEagerFrame() && info()->is_deferred_calling();
65 // Support for converting LOperands to assembler types.
66 Operand ToOperand(LOperand* op) const;
67 Register ToRegister(LOperand* op) const;
68 XMMRegister ToDoubleRegister(LOperand* op) const;
// SIMD128 values (float32x4/float64x2/int32x4) are also carried in XMM
// registers on ia32; these accessors differ only in the representation of
// the operand they accept.
69 XMMRegister ToFloat32x4Register(LOperand* op) const;
70 XMMRegister ToFloat64x2Register(LOperand* op) const;
71 XMMRegister ToInt32x4Register(LOperand* op) const;
72 XMMRegister ToSIMD128Register(LOperand* op) const;
74 bool IsInteger32(LConstantOperand* op) const;
75 bool IsSmi(LConstantOperand* op) const;
// Converts a constant operand to an Immediate under representation r.
// NOTE(review): casts op to LConstantOperand unchecked — callers must pass
// a constant operand.
76 Immediate ToImmediate(LOperand* op, const Representation& r) const {
77 return Immediate(ToRepresentation(LConstantOperand::cast(op), r));
79 double ToDouble(LConstantOperand* op) const;
81 Handle<Object> ToHandle(LConstantOperand* op) const;
83 // The operand denoting the second word (the one with a higher address) of
84 // a double stack slot.
85 Operand HighOperand(LOperand* op);
87 // Try to generate code for the entire chunk, but it may fail if the
88 // chunk contains constructs we cannot handle. Returns true if the
89 // code generation attempt succeeded.
92 // Finish the code by setting stack height, safepoint, and bailout
94 void FinishCode(Handle<Code> code);
96 // Deferred code support.
97 void DoDeferredNumberTagD(LNumberTagD* instr);
99 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
100 void DoDeferredNumberTagIU(LInstruction* instr,
103 IntegerSignedness signedness);
105 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
106 void DoDeferredFloat32x4ToTagged(LInstruction* instr);
107 void DoDeferredInt32x4ToTagged(LInstruction* instr);
108 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
109 void DoDeferredStackCheck(LStackCheck* instr);
110 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
111 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
112 void DoDeferredAllocate(LAllocate* instr);
113 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
115 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
116 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
// Boxes a SIMD128 value into a heap object via the runtime function id.
119 void DoDeferredSIMD128ToTagged(LInstruction* instr, Runtime::FunctionId id);
122 void HandleTaggedToSIMD128(LTaggedToSIMD128* instr);
124 void HandleSIMD128ToTagged(LSIMD128ToTagged* instr);
126 // Parallel move support.
127 void DoParallelMove(LParallelMove* move);
128 void DoGap(LGap* instr);
130 // Emit frame translation commands for an environment.
131 void WriteTranslation(LEnvironment* environment, Translation* translation);
133 void EnsureRelocSpaceForDeoptimization();
135 // Declare methods that deal with the individual node types.
136 #define DECLARE_DO(type) void Do##type(L##type* node);
137 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
// Simple accessors / scratch-register convention.
141 StrictMode strict_mode() const { return info()->strict_mode(); }
143 Scope* scope() const { return scope_; }
// xmm0 is reserved as the double scratch register by this code generator.
145 XMMRegister double_scratch0() const { return xmm0; }
147 void EmitClassOfTest(Label* if_true,
149 Handle<String> class_name,
152 Register temporary2);
154 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
156 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Spill/restore of caller-saved double registers around calls.
158 void SaveCallerDoubles();
159 void RestoreCallerDoubles();
161 // Code generation passes. Returns true if code generation should
163 void GenerateBodyInstructionPre(LInstruction* instr) OVERRIDE;
164 void GenerateBodyInstructionPost(LInstruction* instr) OVERRIDE;
165 bool GeneratePrologue();
166 bool GenerateDeferredCode();
167 bool GenerateJumpTable();
168 bool GenerateSafepointTable();
170 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
171 void GenerateOsrPrologue();
// SafepointMode enumerators: whether a call site records a plain safepoint
// or one with registers (and no arguments).
174 RECORD_SIMPLE_SAFEPOINT,
175 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
// Call emission helpers: each variant records the appropriate safepoint
// and deopt information for the given instruction.
178 void CallCode(Handle<Code> code,
179 RelocInfo::Mode mode,
180 LInstruction* instr);
182 void CallCodeGeneric(Handle<Code> code,
183 RelocInfo::Mode mode,
185 SafepointMode safepoint_mode);
187 void CallRuntime(const Runtime::Function* fun,
190 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
// Convenience overload: resolves a Runtime::FunctionId before calling.
192 void CallRuntime(Runtime::FunctionId id,
194 LInstruction* instr) {
195 const Runtime::Function* function = Runtime::FunctionForId(id);
196 CallRuntime(function, argc, instr);
199 void CallRuntimeFromDeferred(Runtime::FunctionId id,
204 void LoadContextFromDeferred(LOperand* context);
211 // Generate a direct call to a known function. Expects the function
213 void CallKnownFunction(Handle<JSFunction> function,
214 int formal_parameter_count,
219 void RecordSafepointWithLazyDeopt(LInstruction* instr,
220 SafepointMode safepoint_mode);
222 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
223 Safepoint::DeoptMode mode);
// Emits a conditional deoptimization: bails out of optimized code when
// condition cc holds; `detail` is a human-readable reason string.
224 void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail,
225 Deoptimizer::BailoutType bailout_type);
226 void DeoptimizeIf(Condition cc, LInstruction* instr, const char* detail);
// Debug aid: forced periodic deopt is enabled by --deopt-every-n-times,
// but never for stubs.
228 bool DeoptEveryNTimes() {
229 return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
232 void AddToTranslation(LEnvironment* environment,
233 Translation* translation,
237 int* object_index_pointer,
238 int* dematerialized_index_pointer);
239 void PopulateDeoptimizationData(Handle<Code> code);
240 int DefineDeoptimizationLiteral(Handle<Object> literal);
242 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Raw-index variants of the register accessors (index = allocation index).
244 Register ToRegister(int index) const;
245 XMMRegister ToDoubleRegister(int index) const;
246 XMMRegister ToFloat32x4Register(int index) const;
247 XMMRegister ToFloat64x2Register(int index) const;
248 XMMRegister ToInt32x4Register(int index) const;
249 XMMRegister ToSIMD128Register(int index) const;
250 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
251 int32_t ToInteger32(LConstantOperand* op) const;
252 ExternalReference ToExternalReference(LConstantOperand* op) const;
// Builds the addressing-mode Operand for a keyed array element access.
254 Operand BuildFastArrayOperand(LOperand* elements_pointer,
256 Representation key_representation,
257 ElementsKind elements_kind,
258 uint32_t base_offset);
260 Operand BuildSeqStringOperand(Register string,
262 String::Encoding encoding);
264 void EmitIntegerMathAbs(LMathAbs* instr);
266 // Support for recording safepoint and position information.
267 void RecordSafepoint(LPointerMap* pointers,
268 Safepoint::Kind kind,
270 Safepoint::DeoptMode mode);
271 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
272 void RecordSafepoint(Safepoint::DeoptMode mode);
273 void RecordSafepointWithRegisters(LPointerMap* pointers,
275 Safepoint::DeoptMode mode);
277 void RecordAndWritePosition(int position) OVERRIDE;
279 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
280 void EmitGoto(int block);
282 // EmitBranch expects to be the last instruction of a block.
283 template<class InstrType>
284 void EmitBranch(InstrType instr, Condition cc);
285 template<class InstrType>
286 void EmitFalseBranch(InstrType instr, Condition cc);
287 void EmitNumberUntagD(LNumberUntagD* instr, Register input, Register temp,
288 XMMRegister result, NumberUntagDMode mode);
290 // Emits optimized code for typeof x == "y". Modifies input register.
291 // Returns the condition on which a final split to
292 // true and false label should be made, to optimize fallthrough.
293 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
295 // Emits optimized code for %_IsObject(x). Preserves input register.
296 // Returns the condition on which a final split to
297 // true and false label should be made, to optimize fallthrough.
298 Condition EmitIsObject(Register input,
300 Label* is_not_object,
303 // Emits optimized code for %_IsString(x). Preserves input register.
304 // Returns the condition on which a final split to
305 // true and false label should be made, to optimize fallthrough.
306 Condition EmitIsString(Register input,
308 Label* is_not_string,
309 SmiCheck check_needed);
311 // Emits optimized code for %_IsConstructCall().
312 // Caller should branch on equal condition.
313 void EmitIsConstructCall(Register temp);
315 // Emits optimized code to deep-copy the contents of statically known
316 // object graphs (e.g. object literal boilerplate).
317 void EmitDeepCopy(Handle<JSObject> object,
321 AllocationSiteMode mode);
323 void EnsureSpaceForLazyDeopt(int space_needed) OVERRIDE;
// Keyed load/store helpers, specialized per backing-store kind
// (external/typed arrays, SIMD128 external arrays, fixed double arrays,
// and ordinary fixed arrays).
324 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
325 void HandleExternalArrayOpRequiresTemp(LOperand* key,
326 Representation key_representation,
327 ElementsKind elements_kind);
329 void DoLoadKeyedSIMD128ExternalArray(LLoadKeyed* instr);
330 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
331 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
332 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
334 void DoStoreKeyedSIMD128ExternalArray(LStoreKeyed* instr);
335 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
336 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// NOTE(review): the `template <class T>` header for this declaration is
// among the missing lines — confirm against upstream.
339 void EmitVectorLoadICRegisters(T* instr);
341 void EmitReturn(LReturn* instr, bool dynamic_frame_alignment);
343 // Emits code for pushing either a tagged constant, a (non-double)
344 // register, or a stack slot operand.
345 void EmitPushTaggedOperand(LOperand* operand);
347 friend class LGapResolver;
350 // On windows, you may not access the stack more than one page below
351 // the most recently mapped page. To make the allocated area randomly
352 // accessible, we write an arbitrary value to each page in range
353 // esp + offset - page_size .. esp in turn.
354 void MakeSureStackPagesMapped(int offset);
// --- Bookkeeping state (zone-allocated where applicable) ---
// Environments registered for deoptimization, in registration order.
357 ZoneList<LEnvironment*> deoptimizations_;
358 ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
359 ZoneList<Handle<Object> > deoptimization_literals_;
360 int inlined_function_count_;
362 TranslationBuffer translations_;
// Deferred code sections queued via AddDeferredCode().
363 ZoneList<LDeferredCode*> deferred_;
364 bool dynamic_frame_alignment_;
365 bool support_aligned_spilled_doubles_;
367 bool frame_is_built_;
369 // Builder that keeps track of safepoints in the code. The table
370 // itself is emitted at the end of the generated code.
371 SafepointTableBuilder safepoints_;
373 // Compiler from a set of parallel moves to a sequential list of moves.
374 LGapResolver resolver_;
376 Safepoint::Kind expected_safepoint_kind_;
// RAII scope that pushes the safepoint registers on entry and pops them on
// exit, flipping expected_safepoint_kind_ between kSimple and
// kWithRegisters (both transitions are DCHECK-verified).
378 class PushSafepointRegistersScope FINAL BASE_EMBEDDED {
380 explicit PushSafepointRegistersScope(LCodeGen* codegen)
381 : codegen_(codegen) {
382 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
383 codegen_->masm_->PushSafepointRegisters();
384 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
385 DCHECK(codegen_->info()->is_calling());
388 ~PushSafepointRegistersScope() {
389 DCHECK(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
390 codegen_->masm_->PopSafepointRegisters();
391 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
398 friend class LDeferredCode;
399 friend class LEnvironment;
400 friend class SafepointGenerator;
401 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
// Base class for out-of-line ("deferred") code sections emitted after the
// main instruction stream. Constructing an instance registers it with the
// code generator (AddDeferredCode), which later emits each section's
// Generate() body during GenerateDeferredCode().
//
// NOTE(review): this chunk appears to be an incomplete sampling of the
// original header — the `codegen_(codegen)` initializer, access specifiers,
// some members (entry_/exit_/done_ labels, codegen_) and the closing `};`
// are among the missing lines. Verify against upstream V8 before editing.
405 class LDeferredCode : public ZoneObject {
// Records the index of the instruction that spawned this deferred code so
// the section can be attributed back to it.
407 explicit LDeferredCode(LCodeGen* codegen)
409 external_exit_(NULL),
410 instruction_index_(codegen->current_instruction_) {
411 codegen->AddDeferredCode(this);
414 virtual ~LDeferredCode() {}
// Subclasses emit the deferred code body here.
415 virtual void Generate() = 0;
416 virtual LInstruction* instr() = 0;
// Allows the owner to redirect the exit jump to an externally owned label.
418 void SetExit(Label* exit) { external_exit_ = exit; }
419 Label* entry() { return &entry_; }
// Exit target: the externally supplied label if one was set, otherwise the
// section's own internal exit label.
420 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
// When a deferred frame must be built, the "done" label differs from the
// exit label (frame teardown happens between them); otherwise they alias.
421 Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
422 int instruction_index() const { return instruction_index_; }
425 LCodeGen* codegen() const { return codegen_; }
426 MacroAssembler* masm() const { return codegen_->masm(); }
// Non-NULL only after SetExit(); not owned.
432 Label* external_exit_;
434 int instruction_index_;
437 } } // namespace v8::internal
439 #endif // V8_IA32_LITHIUM_CODEGEN_IA32_H_