1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 #ifndef V8_X64_LITHIUM_CODEGEN_X64_H_
29 #define V8_X64_LITHIUM_CODEGEN_X64_H_
31 #include "x64/lithium-x64.h"
34 #include "deoptimizer.h"
35 #include "lithium-codegen.h"
36 #include "safepoint-table.h"
39 #include "x64/lithium-gap-resolver-x64.h"
44 // Forward declarations.
46 class SafepointGenerator;
// Generates x64 native code from a Lithium instruction chunk, recording the
// deoptimization, safepoint, and frame-translation data the runtime needs to
// map optimized frames back to unoptimized code.
48 class LCodeGen: public LCodeGenBase {
// NOTE(review): the integer first arguments to the ZoneList initializers
// below are presumably initial capacities (not sizes) — confirm against the
// ZoneList constructor.
50 LCodeGen(LChunk* chunk, MacroAssembler* assembler, CompilationInfo* info)
51 : LCodeGenBase(chunk, assembler, info),
52 deoptimizations_(4, info->zone()),
53 jump_table_(4, info->zone()),
54 deoptimization_literals_(8, info->zone()),
55 inlined_function_count_(0),
56 scope_(info->scope()),
57 translations_(info->zone()),
58 deferred_(8, info->zone()),
60 frame_is_built_(false),
61 safepoints_(info->zone()),
63 expected_safepoint_kind_(Safepoint::kSimple) {
64 PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Resolves a block id to the id of the block that is actually emitted for it
// (delegates to the chunk's destination lookup).
67 int LookupDestination(int block_id) const {
68 return chunk()->LookupDestination(block_id);
// True when the given block's destination is the next block to be emitted,
// i.e. control can simply fall through without a jump.
71 bool IsNextEmittedBlock(int block_id) const {
72 return LookupDestination(block_id) == GetNextEmittedBlock();
// Whether a stack frame must be built eagerly in the prologue: spill slots
// exist, non-deferred code makes calls, or the compilation explicitly
// requires a frame.  (Additional conditions may exist on lines not shown in
// this view.)
75 bool NeedsEagerFrame() const {
76 return GetStackSlotCount() > 0 ||
77 info()->is_non_deferred_calling() ||
79 info()->requires_frame();
// A frame is built lazily, inside deferred code only, when no eager frame is
// needed but deferred paths make calls.
81 bool NeedsDeferredFrame() const {
82 return !NeedsEagerFrame() && info()->is_deferred_calling();
85 // Support for converting LOperands to assembler types.
86 Register ToRegister(LOperand* op) const;
87 XMMRegister ToDoubleRegister(LOperand* op) const;
88 XMMRegister ToFloat32x4Register(LOperand* op) const;
89 XMMRegister ToInt32x4Register(LOperand* op) const;
90 XMMRegister ToSIMD128Register(LOperand* op) const;
91 bool IsInteger32Constant(LConstantOperand* op) const;
92 bool IsSmiConstant(LConstantOperand* op) const;
93 int32_t ToInteger32(LConstantOperand* op) const;
94 Smi* ToSmi(LConstantOperand* op) const;
95 double ToDouble(LConstantOperand* op) const;
96 ExternalReference ToExternalReference(LConstantOperand* op) const;
97 bool IsTaggedConstant(LConstantOperand* op) const;
98 Handle<Object> ToHandle(LConstantOperand* op) const;
99 Operand ToOperand(LOperand* op) const;
101 // Try to generate code for the entire chunk, but it may fail if the
102 // chunk contains constructs we cannot handle. Returns true if the
103 // code generation attempt succeeded.
106 // Finish the code by setting stack height, safepoint, and bailout
107 // information on it.
108 void FinishCode(Handle<Code> code);
110 // Deferred code support.
111 void DoDeferredNumberTagD(LNumberTagD* instr);
112 void DoDeferredNumberTagU(LNumberTagU* instr);
113 void DoDeferredTaggedToI(LTaggedToI* instr, Label* done);
114 void DoDeferredMathAbsTaggedHeapNumber(LMathAbs* instr);
115 void DoDeferredStackCheck(LStackCheck* instr);
116 void DoDeferredStringCharCodeAt(LStringCharCodeAt* instr);
117 void DoDeferredStringCharFromCode(LStringCharFromCode* instr);
118 void DoDeferredAllocate(LAllocate* instr);
119 void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
121 void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
122 void DoDeferredSIMD128ToTagged(LSIMD128ToTagged* instr,
123 Runtime::FunctionId id);
// SIMD128 boxing/unboxing helpers shared between deferred and inline paths.
126 void HandleTaggedToSIMD128(LTaggedToSIMD128* instr);
128 void HandleSIMD128ToTagged(LSIMD128ToTagged* instr);
130 // Parallel move support.
131 void DoParallelMove(LParallelMove* move);
132 void DoGap(LGap* instr);
134 // Emit frame translation commands for an environment.
135 void WriteTranslation(LEnvironment* environment, Translation* translation);
137 // Declare methods that deal with the individual node types.
138 #define DECLARE_DO(type) void Do##type(L##type* node);
139 LITHIUM_CONCRETE_INSTRUCTION_LIST(DECLARE_DO)
// Classic-mode JS maps to kNonStrictMode; everything else is strict.
143 StrictModeFlag strict_mode_flag() const {
144 return info()->is_classic_mode() ? kNonStrictMode : kStrictMode;
147 LPlatformChunk* chunk() const { return chunk_; }
148 Scope* scope() const { return scope_; }
149 HGraph* graph() const { return chunk()->graph(); }
// Scratch XMM register used for double computations (xmm0 on x64).
151 XMMRegister double_scratch0() const { return xmm0; }
153 void EmitClassOfTest(Label* if_true,
155 Handle<String> class_name,
160 int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
162 void Abort(BailoutReason reason);
// Queues a deferred code section for emission (see GenerateDeferredCode).
164 void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
// Save/restore of caller-saved double registers — presumably used around
// calls when the frame must preserve XMM state; confirm in the .cc file.
167 void SaveCallerDoubles();
168 void RestoreCallerDoubles();
170 // Code generation passes. Returns true if code generation should
172 bool GeneratePrologue();
173 bool GenerateDeferredCode();
174 bool GenerateJumpTable();
175 bool GenerateSafepointTable();
177 // Generates the custom OSR entrypoint and sets the osr_pc_offset.
178 void GenerateOsrPrologue();
// Safepoint-recording variants consumed by CallCodeGeneric (enum opener not
// visible in this view of the file).
181 RECORD_SIMPLE_SAFEPOINT,
182 RECORD_SAFEPOINT_WITH_REGISTERS
185 void CallCodeGeneric(Handle<Code> code,
186 RelocInfo::Mode mode,
188 SafepointMode safepoint_mode,
192 void CallCode(Handle<Code> code,
193 RelocInfo::Mode mode,
194 LInstruction* instr);
// Runtime-call helpers; the convenience overload below resolves the
// function descriptor from its id before delegating.
196 void CallRuntime(const Runtime::Function* function,
199 SaveFPRegsMode save_doubles = kDontSaveFPRegs);
201 void CallRuntime(Runtime::FunctionId id,
203 LInstruction* instr) {
204 const Runtime::Function* function = Runtime::FunctionForId(id);
205 CallRuntime(function, num_arguments, instr);
208 void CallRuntimeFromDeferred(Runtime::FunctionId id,
213 void LoadContextFromDeferred(LOperand* context);
220 // Generate a direct call to a known function. Expects the function
222 void CallKnownFunction(Handle<JSFunction> function,
223 int formal_parameter_count,
228 void RecordSafepointWithLazyDeopt(LInstruction* instr,
229 SafepointMode safepoint_mode,
231 void RegisterEnvironmentForDeoptimization(LEnvironment* environment,
232 Safepoint::DeoptMode mode);
// Conditional deoptimization: emits a bailout taken when condition cc holds.
233 void DeoptimizeIf(Condition cc,
234 LEnvironment* environment,
235 Deoptimizer::BailoutType bailout_type);
236 void DeoptimizeIf(Condition cc, LEnvironment* environment);
237 void ApplyCheckIf(Condition cc, LBoundsCheck* check);
// Stress-testing hook: FLAG_deopt_every_n_times forces periodic deopts, but
// never for code stubs.
239 bool DeoptEveryNTimes() {
240 return FLAG_deopt_every_n_times != 0 && !info()->IsStub();
243 void AddToTranslation(LEnvironment* environment,
244 Translation* translation,
248 int* object_index_pointer,
249 int* dematerialized_index_pointer);
250 void PopulateDeoptimizationData(Handle<Code> code);
251 int DefineDeoptimizationLiteral(Handle<Object> literal);
253 void PopulateDeoptimizationLiteralsWithInlinedFunctions();
// Raw-index variants of the operand-to-register converters above.
255 Register ToRegister(int index) const;
256 XMMRegister ToDoubleRegister(int index) const;
257 XMMRegister ToSIMD128Register(int index) const;
258 Operand BuildFastArrayOperand(
259 LOperand* elements_pointer,
261 ElementsKind elements_kind,
263 uint32_t additional_index = 0);
265 Operand BuildSeqStringOperand(Register string,
267 String::Encoding encoding);
269 void EmitIntegerMathAbs(LMathAbs* instr);
270 void EmitSmiMathAbs(LMathAbs* instr);
272 // Support for recording safepoint and position information.
273 void RecordSafepoint(LPointerMap* pointers,
274 Safepoint::Kind kind,
276 Safepoint::DeoptMode mode);
277 void RecordSafepoint(LPointerMap* pointers, Safepoint::DeoptMode mode);
278 void RecordSafepoint(Safepoint::DeoptMode mode);
279 void RecordSafepointWithRegisters(LPointerMap* pointers,
281 Safepoint::DeoptMode mode);
282 void RecordAndWritePosition(int position) V8_OVERRIDE;
284 static Condition TokenToCondition(Token::Value op, bool is_unsigned);
285 void EmitGoto(int block);
287 // EmitBranch expects to be the last instruction of a block.
288 template<class InstrType>
289 void EmitBranch(InstrType instr, Condition cc);
290 template<class InstrType>
291 void EmitFalseBranch(InstrType instr, Condition cc);
292 void EmitNumberUntagD(
295 bool allow_undefined_as_nan,
296 bool deoptimize_on_minus_zero,
298 NumberUntagDMode mode = NUMBER_CANDIDATE_IS_ANY_TAGGED);
300 // Emits optimized code for typeof x == "y". Modifies input register.
301 // Returns the condition on which a final split to
302 // true and false label should be made, to optimize fallthrough.
303 Condition EmitTypeofIs(LTypeofIsAndBranch* instr, Register input);
305 // Emits optimized code for %_IsObject(x). Preserves input register.
306 // Returns the condition on which a final split to
307 // true and false label should be made, to optimize fallthrough.
308 Condition EmitIsObject(Register input,
309 Label* is_not_object,
312 // Emits optimized code for %_IsString(x). Preserves input register.
313 // Returns the condition on which a final split to
314 // true and false label should be made, to optimize fallthrough.
315 Condition EmitIsString(Register input,
317 Label* is_not_string,
318 SmiCheck check_needed);
320 // Emits optimized code for %_IsConstructCall().
321 // Caller should branch on equal condition.
322 void EmitIsConstructCall(Register temp);
324 // Emits code for pushing either a tagged constant, a (non-double)
325 // register, or a stack slot operand.
326 void EmitPushTaggedOperand(LOperand* operand);
328 // Emits optimized code to deep-copy the contents of statically known
329 // object graphs (e.g. object literal boilerplate).
330 void EmitDeepCopy(Handle<JSObject> object,
334 AllocationSiteMode mode);
336 void EnsureSpaceForLazyDeopt(int space_needed) V8_OVERRIDE;
// Keyed load/store code emission, specialized per elements kind.
337 void DoLoadKeyedExternalArray(LLoadKeyed* instr);
338 void HandleExternalArrayOpRequiresPreScale(LOperand* key,
339 ElementsKind elements_kind);
340 void DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr);
341 void DoLoadKeyedFixedArray(LLoadKeyed* instr);
342 void DoStoreKeyedExternalArray(LStoreKeyed* instr);
343 void DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr);
344 void DoStoreKeyedFixedArray(LStoreKeyed* instr);
346 // On windows, you may not access the stack more than one page below
347 // the most recently mapped page. To make the allocated area randomly
348 // accessible, we write an arbitrary value to each page in range
349 // rsp + offset - page_size .. rsp in turn.
350 void MakeSureStackPagesMapped(int offset);
// --- Code-generation state ---
// Environments registered for deoptimization (see
// RegisterEnvironmentForDeoptimization).
353 ZoneList<LEnvironment*> deoptimizations_;
// Entries for the deopt jump table emitted by GenerateJumpTable().
354 ZoneList<Deoptimizer::JumpTableEntry> jump_table_;
// Literals referenced from deopt translations (see
// DefineDeoptimizationLiteral).
355 ZoneList<Handle<Object> > deoptimization_literals_;
356 int inlined_function_count_;
// Buffer receiving the frame translations written by WriteTranslation.
358 TranslationBuffer translations_;
// Deferred code sections queued via AddDeferredCode().
359 ZoneList<LDeferredCode*> deferred_;
// Whether the stack frame has been constructed yet (relevant when frames
// are built lazily; see NeedsDeferredFrame()).
361 bool frame_is_built_;
363 // Builder that keeps track of safepoints in the code. The table
364 // itself is emitted at the end of the generated code.
365 SafepointTableBuilder safepoints_;
367 // Compiler from a set of parallel moves to a sequential list of moves.
368 LGapResolver resolver_;
// The safepoint kind expected to be recorded next; toggled by
// PushSafepointRegistersScope below.
370 Safepoint::Kind expected_safepoint_kind_;
// RAII scope that pushes the safepoint registers on entry and pops them on
// exit, switching expected_safepoint_kind_ between kSimple and
// kWithRegisters so mismatched safepoint recording is caught by the ASSERTs.
372 class PushSafepointRegistersScope V8_FINAL BASE_EMBEDDED {
374 explicit PushSafepointRegistersScope(LCodeGen* codegen)
375 : codegen_(codegen) {
376 ASSERT(codegen_->info()->is_calling());
377 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kSimple);
378 codegen_->masm_->PushSafepointRegisters();
379 codegen_->expected_safepoint_kind_ = Safepoint::kWithRegisters;
382 ~PushSafepointRegistersScope() {
383 ASSERT(codegen_->expected_safepoint_kind_ == Safepoint::kWithRegisters);
384 codegen_->masm_->PopSafepointRegisters();
385 codegen_->expected_safepoint_kind_ = Safepoint::kSimple;
392 friend class LDeferredCode;
393 friend class LEnvironment;
394 friend class SafepointGenerator;
395 DISALLOW_COPY_AND_ASSIGN(LCodeGen);
// Base class for out-of-line (deferred) code sections. Constructing an
// instance automatically registers it with the code generator; subclasses
// implement Generate() to emit the deferred code.
399 class LDeferredCode: public ZoneObject {
401 explicit LDeferredCode(LCodeGen* codegen)
403 external_exit_(NULL),
// Remember which instruction this deferred section belongs to.
404 instruction_index_(codegen->current_instruction_) {
405 codegen->AddDeferredCode(this);
408 virtual ~LDeferredCode() {}
409 virtual void Generate() = 0;
410 virtual LInstruction* instr() = 0;
// Overrides the default exit label with one supplied by the caller.
412 void SetExit(Label* exit) { external_exit_ = exit; }
413 Label* entry() { return &entry_; }
// An externally supplied exit label (via SetExit) takes precedence over the
// built-in one.
414 Label* exit() { return external_exit_ != NULL ? external_exit_ : &exit_; }
// When a deferred frame is in use, 'done' is a distinct label; otherwise it
// aliases exit().
415 Label* done() { return codegen_->NeedsDeferredFrame() ? &done_ : exit(); }
416 int instruction_index() const { return instruction_index_; }
419 LCodeGen* codegen() const { return codegen_; }
420 MacroAssembler* masm() const { return codegen_->masm(); }
// Non-NULL only after SetExit() has been called.
427 Label* external_exit_;
428 int instruction_index_;
431 } } // namespace v8::internal
433 #endif // V8_X64_LITHIUM_CODEGEN_X64_H_