1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_
6 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_
8 #include "src/arm/lithium-arm.h"
10 #include "src/arm/lithium-gap-resolver-arm.h"
11 #include "src/deoptimizer.h"
12 #include "src/lithium-codegen.h"
13 #include "src/safepoint-table.h"
14 #include "src/scopes.h"
15 #include "src/utils.h"
20 // Forward declarations.
22 class SafepointGenerator;
24 class LCodeGen: public LCodeGenBase {
// NOTE(review): this listing is an elided excerpt -- the embedded source
// line numbers jump, so access specifiers, some parameter lists, breaks,
// and closing braces are missing from view.  All visible lines are kept
// verbatim; comments below annotate only what the visible code shows.
//
// LCodeGen is the ARM back end of the Lithium (Crankshaft) pipeline: it
// walks an LChunk and emits machine code through the MacroAssembler,
// recording safepoint and deoptimization metadata along the way.
// Constructor: sizes the zone-allocated side tables and seeds the
// deoptimization literal table with the inlined functions (call below).
26 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
27 : LCodeGenBase(chunk, assembler, info),
28 deoptimizations_(4, info->zone()),
29 deopt_jump_table_(4, info->zone()),
30 deoptimization_literals_(8, info->zone()),
31 inlined_function_count_(0),
32 scope_(info->scope()),
33 translations_(info->zone()),
34 deferred_(8, info->zone()),
36 frame_is_built_(false),
37 safepoints_(info->zone()),
39 expected_safepoint_kind_(Safepoint::kSimple) {
40 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Maps a block id to the id of the block it was rewired to (chunk lookup).
44 int LookupDestination(int block_id) const {
45 return chunk()->LookupDestination(block_id);
// True when the branch target is the block emitted immediately after the
// current one, so the jump can be elided.
48 bool IsNextEmittedBlock(int block_id) const {
49 return LookupDestination(block_id) == GetNextEmittedBlock();
// An eager stack frame is needed when spill slots are in use, when
// non-deferred code makes calls, or when the CompilationInfo requires one.
52 bool NeedsEagerFrame() const {
53 return GetStackSlotCount() > 0 ||
54 info()->is_non_deferred_calling() ||
56 info()->requires_frame();
// The frame is built lazily (only on deferred-code paths) when no eager
// frame is needed but deferred code performs calls.
58 bool NeedsDeferredFrame() const {
59 return !NeedsEagerFrame() && info()->is_deferred_calling();
// lr has been saved iff the frame has been built.
62 LinkRegisterStatus GetLinkRegisterState() const {
63 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
66 // Support for converting LOperands to assembler types.
67 // LOperand must be a register.
68 Register ToRegister(LOperand* op) const;
70 // LOperand is loaded into scratch, unless already a register.
71 Register EmitLoadRegister(LOperand* op, Register scratch);
73 // LOperand must be a double register.
74 DwVfpRegister ToDoubleRegister(LOperand* op) const;
76 // LOperand is loaded into dbl_scratch, unless already a double register.
77 DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
78 SwVfpRegister flt_scratch,
79 DwVfpRegister dbl_scratch);
// Constant-operand accessors; callers are expected to have checked the
// operand kind first (see IsInteger32/IsSmi below).
80 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
81 int32_t ToInteger32(LConstantOperand* op) const;
82 Smi* ToSmi(LConstantOperand* op) const;
83 double ToDouble(LConstantOperand* op) const;
84 Operand ToOperand(LOperand* op);
85 MemOperand ToMemOperand(LOperand* op) const;
86 // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
87 MemOperand ToHighMemOperand(LOperand* op) const;
89 bool IsInteger32(LConstantOperand* op) const;
90 bool IsSmi(LConstantOperand* op) const;
91 Handle<Object> ToHandle(LConstantOperand* op) const;
93 // Try to generate code for the entire chunk, but it may fail if the
94 // chunk contains constructs we cannot handle. Returns true if the
95 // code generation attempt succeeded.
// (The GenerateCode() declaration itself is elided from this excerpt.)
98 // Finish the code by setting stack height, safepoint, and bailout
// information on it (continuation of the sentence above, restored).
100 void FinishCode(Handle<Code> code);
102 // Deferred code support.
103 void DoDeferredNumberTagD(LNumberTagD* instr);
105 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
106 void DoDeferredNumberTagIU(LInstruction* instr,
// (intervening parameters elided from this excerpt)
110 IntegerSignedness signedness);
112 void DoDeferredTaggedToI(LTaggedToI* instr);
113 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
114 void DoDeferredStackCheck(LStackCheck* instr);
115 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
116 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
117 void DoDeferredAllocate(LAllocate* instr);
118 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
120 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
121 void DoDeferredLoadMutableDouble(LLoadFieldByIndex* instr,
125 void DoDeferredSIMD128ToTagged(LInstruction* instr, Runtime::FunctionId id);
127 // Parallel move support.
128 void DoParallelMove(LParallelMove* move);
129 void DoGap(LGap* instr);
// Computes the MemOperand for a keyed element access; the remaining
// parameters are elided from this excerpt.
131 MemOperand PrepareKeyedOperand(Register key,
133 bool key_is_constant,
139 // Emit frame translation commands for an environment.
140 void WriteTranslation(LEnvironment* environment, Translation* translation);
142 // Declare methods that deal with the individual node types.
143 #define DECLARE_DO(type) void Do##type(L##type* node);
144 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
// (The matching #undef DECLARE_DO is elided from this excerpt.)
148 StrictMode strict_mode() const { return info()->strict_mode(); }
150 Scope* scope() const { return scope_; }
// Fixed scratch registers reserved for the code generator on ARM.
152 Register scratch0() { return r9; }
153 LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }
155 LInstruction* GetNextInstruction();
157 void EmitClassOfTest(Label* if_true,
159 Handle<String> class_name,
162 Register temporary2);
164 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
166 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Spill/restore of caller-saved double registers around calls.
168 void SaveCallerDoubles();
169 void RestoreCallerDoubles();
171 // Code generation passes. Returns true if code generation should
// continue (restored continuation of the sentence above).
173 void GenerateBodyInstructionPre(LInstruction* instr) V8_OVERRIDE;
174 bool GeneratePrologue();
175 bool GenerateDeferredCode();
176 bool GenerateDeoptJumpTable();
177 bool GenerateSafepointTable();
179 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
180 void GenerateOsrPrologue();
// Enumerators of the SafepointMode enum (its declaration header line is
// elided from this excerpt).
183 RECORD_SIMPLE_SAFEPOINT,
184 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
187 int CallCodeSize(Handle<Code> code, RelocInfo::Mode mode);
// CallCode / CallCodeGeneric emit a call to a code object; their
// signatures are partially elided in this excerpt.
191 RelocInfo::Mode mode,
193 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
195 void CallCodeGeneric(
197 RelocInfo::Mode mode,
199 SafepointMode safepoint_mode,
200 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
202 void CallRuntime(const Runtime::Function* function,
205 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
// Convenience overload: resolves the FunctionId and forwards to the
// Runtime::Function* overload above.
207 void CallRuntime(Runtime::FunctionId id,
209 LInstruction* instr) {
210 const Runtime::Function* function = Runtime::FunctionForId(id);
211 CallRuntime(function, num_arguments, instr);
214 void LoadContextFromDeferred(LOperand* context);
215 void CallRuntimeFromDeferred(Runtime::FunctionId id,
225 // Generate a direct call to a known function. Expects the function
// NOTE(review): continuation of the sentence above is elided; in the ARM
// port the callee is conventionally expected in r1 -- verify upstream.
227 void CallKnownFunction(Handle<JSFunction> function,
228 int formal_parameter_count,
233 void RecordSafepointWithLazyDeopt(LInstruction* instr,
234 SafepointMode safepoint_mode);
// Deoptimization support: registers environments and emits conditional
// bailouts into the deopt jump table.
236 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
237 Safepoint::DeoptMode mode);
238 void DeoptimizeIf(Condition condition,
239 LEnvironment* environment,
240 Deoptimizer::BailoutType bailout_type);
241 void DeoptimizeIf(Condition condition, LEnvironment* environment);
243 void AddToTranslation(LEnvironment* environment,
244 Translation* translation,
248 int* object_index_pointer,
249 int* dematerialized_index_pointer);
250 void PopulateDeoptimizationData(Handle<Code> code);
251 int DefineDeoptimizationLiteral(Handle<Object> literal);
253 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Raw register-code accessors (index is a register code, not an LOperand).
255 Register ToRegister(int index) const;
256 DwVfpRegister ToDoubleRegister(int index) const;
258 MemOperand BuildSeqStringOperand(Register string,
260 String::Encoding encoding);
262 void EmitIntegerMathAbs(LMathAbs* instr);
264 // Support for recording safepoint and position information.
265 void RecordSafepoint(LPointerMap* pointers,
266 Safepoint::Kind kind,
268 Safepoint::DeoptMode mode);
269 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
270 void RecordSafepoint(Safepoint::DeoptMode mode);
271 void RecordSafepointWithRegisters(LPointerMap* pointers,
273 Safepoint::DeoptMode mode);
274 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
276 Safepoint::DeoptMode mode);
278 void RecordAndWritePosition(int position) V8_OVERRIDE;
280 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
281 void EmitGoto(int block);
283 // EmitBranch expects to be the last instruction of a block.
284 template<class InstrType>
285 void EmitBranch(InstrType instr, Condition condition);
286 template<class InstrType>
287 void EmitFalseBranch(InstrType instr, Condition condition);
288 void EmitNumberUntagD(Register input,
289 DwVfpRegister result,
290 bool allow_undefined_as_nan,
291 bool deoptimize_on_minus_zero,
293 NumberUntagDMode mode);
295 // Emits optimized code for typeof x == "y". Modifies input register.
296 // Returns the condition on which a final split to
297 // true and false label should be made, to optimize fallthrough.
298 Condition EmitTypeofIs(Label* true_label,
301 Handle<String> type_name);
303 // Emits optimized code for %_IsObject(x). Preserves input register.
304 // Returns the condition on which a final split to
305 // true and false label should be made, to optimize fallthrough.
306 Condition EmitIsObject(Register input,
308 Label* is_not_object,
311 // Emits optimized code for %_IsString(x). Preserves input register.
312 // Returns the condition on which a final split to
313 // true and false label should be made, to optimize fallthrough.
314 Condition EmitIsString(Register input,
316 Label* is_not_string,
317 SmiCheck check_needed);
319 // Emits optimized code for %_IsConstructCall().
320 // Caller should branch on equal condition.
321 void EmitIsConstructCall(Register temp1, Register temp2);
323 // Emits optimized code to deep-copy the contents of statically known
324 // object graphs (e.g. object literal boilerplate).
325 void EmitDeepCopy(Handle<JSObject> object,
329 AllocationSiteMode mode);
331 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
// Per-elements-kind keyed load/store emitters.
332 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
334 void DoLoadKeyedSIMD128ExternalArray(LLoadKeyed* instr);
335 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
336 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
337 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
339 void DoStoreKeyedSIMD128ExternalArray(LStoreKeyed* instr);
340 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
341 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
// --- Member state (zone-allocated side tables and flags) ---
343 ZoneList<LEnvironment*> deoptimizations_;
344 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
345 ZoneList<Handle<Object> > deoptimization_literals_;
346 int inlined_function_count_;
348 TranslationBuffer translations_;
349 ZoneList<LDeferredCode*> deferred_;
351 bool frame_is_built_;
353 // Builder that keeps track of safepoints in the code. The table
354 // itself is emitted at the end of the generated code.
355 SafepointTableBuilder safepoints_;
357 // Compiles a set of parallel moves into a sequential list of moves.
358 LGapResolver resolver_;
360 Safepoint::Kind expected_safepoint_kind_;
// RAII scope: pushes the safepoint registers (and optionally the doubles)
// on entry, pops them on exit, and tracks the expected safepoint kind on
// the owning LCodeGen.  Only valid when the code is marked as calling.
362 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
364 PushSafepointRegistersScope(LCodeGen* codegen,
365 Safepoint::Kind kind)
366 : codegen_(codegen) {
367 ASSERT(codegen_->info()->is_calling());
// Nesting is not supported: the previous kind must be kSimple.
368 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
369 codegen_->expected_safepoint_kind_ = kind;
371 switch (codegen_->expected_safepoint_kind_) {
372 case Safepoint::kWithRegisters:
373 codegen_->masm_->PushSafepointRegisters();
// (break / default arms elided from this excerpt)
375 case Safepoint::kWithRegistersAndDoubles:
376 codegen_->masm_->PushSafepointRegistersAndDoubles();
// Destructor pops whatever the constructor pushed and restores the
// kSimple kind.  (The switch header line is elided from this excerpt.)
383 ~PushSafepointRegistersScope() {
384 Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
385 ASSERT((kind & Safepoint::kWithRegisters) != 0);
387 case Safepoint::kWithRegisters:
388 codegen_->masm_->PopSafepointRegisters();
390 case Safepoint::kWithRegistersAndDoubles:
391 codegen_->masm_->PopSafepointRegistersAndDoubles();
396 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
403 friend class LDeferredCode;
404 friend class LEnvironment;
405 friend class SafepointGenerator;
406 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
410 class LDeferredCode : public ZoneObject {
// Base class for out-of-line (deferred) code emitted after the main
// instruction stream.  Subclasses implement Generate(); registration with
// the code generator happens in the constructor below.
// NOTE(review): elided excerpt -- access specifiers, part of the
// initializer list, and closing braces are missing from view.
412 explicit LDeferredCode(LCodeGen* codegen)
414 external_exit_(NULL),
// Remember which instruction this deferred stub belongs to.
415 instruction_index_(codegen->current_instruction_) {
// Self-register so the code generator will emit this stub later.
416 codegen->AddDeferredCode(this);
419 virtual ~LDeferredCode() {}
420 virtual void Generate() = 0;
421 virtual LInstruction* instr() = 0;
// An externally supplied exit label overrides the built-in exit_ label.
423 void SetExit(Label* exit) { external_exit_ = exit; }
424 Label* entry() { return &entry_; }
425 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
426 int instruction_index() const { return instruction_index_; }
429 LCodeGen* codegen() const { return codegen_; }
430 MacroAssembler* masm() const { return codegen_->masm(); }
436 Label* external_exit_;
437 int instruction_index_;
440 } } // namespace v8::internal
442 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_