1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
4 // met:
5 //
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_ARM_LITHIUM_CODEGEN_ARM_H_
29 #define V8_ARM_LITHIUM_CODEGEN_ARM_H_
31 #include "arm/lithium-arm.h"
33 #include "arm/lithium-gap-resolver-arm.h"
34 #include "deoptimizer.h"
35 #include "lithium-codegen.h"
36 #include "safepoint-table.h"
43 // Forward declarations.
45 class SafepointGenerator;
47 class LCodeGen: public LCodeGenBase {
49 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
50 : LCodeGenBase(chunk, assembler, info),
51 deoptimizations_(4, info->zone()),
52 deopt_jump_table_(4, info->zone()),
53 deoptimization_literals_(8, info->zone()),
54 inlined_function_count_(0),
55 scope_(info->scope()),
56 translations_(info->zone()),
57 deferred_(8, info->zone()),
59 frame_is_built_(false),
60 safepoints_(info->zone()),
62 expected_safepoint_kind_(Safepoint::kSimple) {
63 PopulateDeoptimizationLiteralsWithInlinedFunctions();
67 int LookupDestination(int block_id) const {
68 return chunk()->LookupDestination(block_id);
71 bool IsNextEmittedBlock(int block_id) const {
72 return LookupDestination(block_id) == GetNextEmittedBlock();
75 bool NeedsEagerFrame() const {
76 return GetStackSlotCount() > 0 ||
77 info()->is_non_deferred_calling() ||
79 info()->requires_frame();
81 bool NeedsDeferredFrame() const {
82 return !NeedsEagerFrame() && info()->is_deferred_calling();
85 LinkRegisterStatus GetLinkRegisterState() const {
86 return frame_is_built_ ? kLRHasBeenSaved : kLRHasNotBeenSaved;
89 // Support for converting LOperands to assembler types.
90 // LOperand must be a register.
91 Register ToRegister(LOperand* op) const;
93 // LOperand is loaded into scratch, unless already a register.
94 Register EmitLoadRegister(LOperand* op, Register scratch);
96 // LOperand must be a double register.
97 DwVfpRegister ToDoubleRegister(LOperand* op) const;
99 // LOperand is loaded into dbl_scratch, unless already a double register.
100 DwVfpRegister EmitLoadDoubleRegister(LOperand* op,
101 SwVfpRegister flt_scratch,
102 DwVfpRegister dbl_scratch);
103 int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
104 int32_t ToInteger32(LConstantOperand* op) const;
105 Smi* ToSmi(LConstantOperand* op) const;
106 double ToDouble(LConstantOperand* op) const;
107 Operand ToOperand(LOperand* op);
108 MemOperand ToMemOperand(LOperand* op) const;
109 // Returns a MemOperand pointing to the high word of a DoubleStackSlot.
110 MemOperand ToHighMemOperand(LOperand* op) const;
112 bool IsInteger32(LConstantOperand* op) const;
113 bool IsSmi(LConstantOperand* op) const;
114 Handle<Object> ToHandle(LConstantOperand* op) const;
116 // Try to generate code for the entire chunk, but it may fail if the
117 // chunk contains constructs we cannot handle. Returns true if the
118 // code generation attempt succeeded.
121 // Finish the code by setting stack height, safepoint, and bailout
122 // information on it.
123 void FinishCode(Handle<Code> code);
125 // Deferred code support.
126 void DoDeferredNumberTagD(LNumberTagD* instr);
128 enum IntegerSignedness { SIGNED_INT32, UNSIGNED_INT32 };
129 void DoDeferredNumberTagI(LInstruction* instr,
131 IntegerSignedness signedness);
133 void DoDeferredTaggedToI(LTaggedToI* instr);
134 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
135 void DoDeferredStackCheck(LStackCheck* instr);
136 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
137 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
138 void DoDeferredAllocate(LAllocate* instr);
139 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
141 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
142 void DoDeferredSIMD128ToTagged(LInstruction* instr, Runtime::FunctionId id);
144 // Parallel move support.
145 void DoParallelMove(LParallelMove* move);
146 void DoGap(LGap* instr);
148 MemOperand PrepareKeyedOperand(Register key,
150 bool key_is_constant,
154 int additional_index,
155 int additional_offset);
157 // Emit frame translation commands for an environment.
158 void WriteTranslation(LEnvironment* environment, Translation* translation);
160 // Declare methods that deal with the individual node types.
161 #define DECLARE_DO(type) void Do##type(L##type* node);
162 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
166 StrictModeFlag strict_mode_flag() const {
167 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
170 Scope* scope() const { return scope_; }
172 Register scratch0() { return r9; }
173 LowDwVfpRegister double_scratch0() { return kScratchDoubleReg; }
175 LInstruction* GetNextInstruction();
177 void EmitClassOfTest(Label* if_true,
179 Handle<String> class_name,
182 Register temporary2);
184 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
186 void Abort(BailoutReason reason);
188 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
190 void SaveCallerDoubles();
191 void RestoreCallerDoubles();
193 // Code generation passes. Returns true if code generation should
195 bool GeneratePrologue();
196 bool GenerateDeferredCode();
197 bool GenerateDeoptJumpTable();
198 bool GenerateSafepointTable();
200 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
201 void GenerateOsrPrologue();
204 RECORD_SIMPLE_SAFEPOINT,
205 RECORD_SAFEPOINT_WITH_REGISTERS_AND_NO_ARGUMENTS
210 RelocInfo::Mode mode,
212 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
214 void CallCodeGeneric(
216 RelocInfo::Mode mode,
218 SafepointMode safepoint_mode,
219 TargetAddressStorageMode storage_mode = CAN_INLINE_TARGET_ADDRESS);
221 void CallRuntime(const Runtime::Function* function,
224 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
226 void CallRuntime(Runtime::FunctionId id,
228 LInstruction* instr) {
229 const Runtime::Function* function = Runtime::FunctionForId(id);
230 CallRuntime(function, num_arguments, instr);
233 void LoadContextFromDeferred(LOperand* context);
234 void CallRuntimeFromDeferred(Runtime::FunctionId id,
244 // Generate a direct call to a known function. Expects the function
246 void CallKnownFunction(Handle<JSFunction> function,
247 int formal_parameter_count,
252 void RecordSafepointWithLazyDeopt(LInstruction* instr,
253 SafepointMode safepoint_mode);
255 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
256 Safepoint::DeoptMode mode);
257 void DeoptimizeIf(Condition condition,
258 LEnvironment* environment,
259 Deoptimizer::BailoutType bailout_type);
260 void DeoptimizeIf(Condition condition, LEnvironment* environment);
261 void ApplyCheckIf(Condition condition, LBoundsCheck* check);
263 void AddToTranslation(LEnvironment* environment,
264 Translation* translation,
268 int* object_index_pointer,
269 int* dematerialized_index_pointer);
270 void PopulateDeoptimizationData(Handle<Code> code);
271 int DefineDeoptimizationLiteral(Handle<Object> literal);
273 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
275 Register ToRegister(int index) const;
276 DwVfpRegister ToDoubleRegister(int index) const;
278 MemOperand BuildSeqStringOperand(Register string,
280 String::Encoding encoding);
282 void EmitIntegerMathAbs(LMathAbs* instr);
284 // Support for recording safepoint and position information.
285 void RecordSafepoint(LPointerMap* pointers,
286 Safepoint::Kind kind,
288 Safepoint::DeoptMode mode);
289 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
290 void RecordSafepoint(Safepoint::DeoptMode mode);
291 void RecordSafepointWithRegisters(LPointerMap* pointers,
293 Safepoint::DeoptMode mode);
294 void RecordSafepointWithRegistersAndDoubles(LPointerMap* pointers,
296 Safepoint::DeoptMode mode);
298 void RecordAndWritePosition(int position) V8_OVERRIDE;
300 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
301 void EmitGoto(int block);
303 // EmitBranch expects to be the last instruction of a block.
304 template<class InstrType>
305 void EmitBranch(InstrType instr, Condition condition);
306 template<class InstrType>
307 void EmitFalseBranch(InstrType instr, Condition condition);
308 void EmitNumberUntagD(Register input,
309 DwVfpRegister result,
310 bool allow_undefined_as_nan,
311 bool deoptimize_on_minus_zero,
313 NumberUntagDMode mode);
315 // Emits optimized code for typeof x == "y". Modifies input register.
316 // Returns the condition on which a final split to
317 // true and false label should be made, to optimize fallthrough.
318 Condition EmitTypeofIs(Label* true_label,
321 Handle<String> type_name);
323 // Emits optimized code for %_IsObject(x). Preserves input register.
324 // Returns the condition on which a final split to
325 // true and false label should be made, to optimize fallthrough.
326 Condition EmitIsObject(Register input,
328 Label* is_not_object,
331 // Emits optimized code for %_IsString(x). Preserves input register.
332 // Returns the condition on which a final split to
333 // true and false label should be made, to optimize fallthrough.
334 Condition EmitIsString(Register input,
336 Label* is_not_string,
337 SmiCheck check_needed);
339 // Emits optimized code for %_IsConstructCall().
340 // Caller should branch on equal condition.
341 void EmitIsConstructCall(Register temp1, Register temp2);
343 // Emits optimized code to deep-copy the contents of statically known
344 // object graphs (e.g. object literal boilerplate).
345 void EmitDeepCopy(Handle<JSObject> object,
349 AllocationSiteMode mode);
351 // Emit optimized code for integer division.
352 // Inputs are signed.
353 // All registers are clobbered.
354 // If 'remainder' is no_reg, it is not computed.
355 void EmitSignedIntegerDivisionByConstant(Register result,
360 LEnvironment* environment);
362 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
363 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
365 void DoLoadKeyedSIMD128ExternalArray(LLoadKeyed* instr);
366 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
367 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
368 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
370 void DoStoreKeyedSIMD128ExternalArray(LStoreKeyed* instr);
371 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
372 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
374 ZoneList<LEnvironment*> deoptimizations_;
375 ZoneList<Deoptimizer::JumpTableEntry> deopt_jump_table_;
376 ZoneList<Handle<Object> > deoptimization_literals_;
377 int inlined_function_count_;
379 TranslationBuffer translations_;
380 ZoneList<LDeferredCode*> deferred_;
382 bool frame_is_built_;
384 // Builder that keeps track of safepoints in the code. The table
385 // itself is emitted at the end of the generated code.
386 SafepointTableBuilder safepoints_;
388 // Compiler from a set of parallel moves to a sequential list of moves.
389 LGapResolver resolver_;
391 Safepoint::Kind expected_safepoint_kind_;
393 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
395 PushSafepointRegistersScope(LCodeGen* codegen,
396 Safepoint::Kind kind)
397 : codegen_(codegen) {
398 ASSERT(codegen_->info()->is_calling());
399 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
400 codegen_->expected_safepoint_kind_ = kind;
402 switch (codegen_->expected_safepoint_kind_) {
403 case Safepoint::kWithRegisters:
404 codegen_->masm_->PushSafepointRegisters();
406 case Safepoint::kWithRegistersAndDoubles:
407 codegen_->masm_->PushSafepointRegistersAndDoubles();
414 ~PushSafepointRegistersScope() {
415 Safepoint::Kind kind = codegen_->expected_safepoint_kind_;
416 ASSERT((kind & Safepoint::kWithRegisters) != 0);
418 case Safepoint::kWithRegisters:
419 codegen_->masm_->PopSafepointRegisters();
421 case Safepoint::kWithRegistersAndDoubles:
422 codegen_->masm_->PopSafepointRegistersAndDoubles();
427 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
434 friend class LDeferredCode;
435 friend class LEnvironment;
436 friend class SafepointGenerator;
437 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
441 class LDeferredCode : public ZoneObject {
443 explicit LDeferredCode(LCodeGen* codegen)
445 external_exit_(NULL),
446 instruction_index_(codegen->current_instruction_) {
447 codegen->AddDeferredCode(this);
450 virtual ~LDeferredCode() {}
451 virtual void Generate() = 0;
452 virtual LInstruction* instr() = 0;
454 void SetExit(Label* exit) { external_exit_ = exit; }
455 Label* entry() { return &entry_; }
456 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
457 int instruction_index() const { return instruction_index_; }
460 LCodeGen* codegen() const { return codegen_; }
461 MacroAssembler* masm() const { return codegen_->masm(); }
467 Label* external_exit_;
468 int instruction_index_;
471 } } // namespace v8::internal
473 #endif // V8_ARM_LITHIUM_CODEGEN_ARM_H_