deps: update v8 to 4.3.61.21
deps/v8/src/compiler/instruction-selector-impl.h
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_
#define V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_

#include "src/compiler/instruction.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"
#include "src/compiler/schedule.h"
#include "src/macro-assembler.h"

namespace v8 {
namespace internal {
namespace compiler {

// Helper struct containing data about a table or lookup switch.
struct SwitchInfo {
  int32_t min_value;           // minimum value of {case_values}
  int32_t max_value;           // maximum value of {case_values}
  size_t value_range;          // |max_value - min_value| + 1
  size_t case_count;           // number of cases
  int32_t* case_values;        // actual case values, unsorted
  BasicBlock** case_branches;  // basic blocks corresponding to case values
  BasicBlock* default_branch;  // default branch target
};
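
// Illustrative note (not part of the original header): a switch over the case
// values {1, 5, 9} would be described by min_value = 1, max_value = 9,
// value_range = |9 - 1| + 1 = 9 and case_count = 3. Comparing value_range
// against case_count is what lets the instruction selector choose between a
// dense table switch and a sparse lookup switch.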

// A helper class for the instruction selector that simplifies construction of
// Operands. This class implements a base for architecture-specific helpers.
class OperandGenerator {
 public:
  explicit OperandGenerator(InstructionSelector* selector)
      : selector_(selector) {}

  InstructionOperand NoOutput() {
    return InstructionOperand();  // Generates an invalid operand.
  }

  InstructionOperand DefineAsRegister(Node* node) {
    return Define(node,
                  UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                     GetVReg(node)));
  }

  InstructionOperand DefineSameAsFirst(Node* node) {
    return Define(node,
                  UnallocatedOperand(UnallocatedOperand::SAME_AS_FIRST_INPUT,
                                     GetVReg(node)));
  }

  InstructionOperand DefineAsFixed(Node* node, Register reg) {
    return Define(node, UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           Register::ToAllocationIndex(reg),
                                           GetVReg(node)));
  }

  InstructionOperand DefineAsFixed(Node* node, DoubleRegister reg) {
    return Define(node,
                  UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                     DoubleRegister::ToAllocationIndex(reg),
                                     GetVReg(node)));
  }

  InstructionOperand DefineAsConstant(Node* node) {
    selector()->MarkAsDefined(node);
    int virtual_register = GetVReg(node);
    sequence()->AddConstant(virtual_register, ToConstant(node));
    return ConstantOperand(virtual_register);
  }

  InstructionOperand DefineAsLocation(Node* node, LinkageLocation location,
                                      MachineType type) {
    return Define(node, ToUnallocatedOperand(location, type, GetVReg(node)));
  }

  InstructionOperand Use(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::NONE,
                                        UnallocatedOperand::USED_AT_START,
                                        GetVReg(node)));
  }

  InstructionOperand UseRegister(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START,
                                        GetVReg(node)));
  }

  InstructionOperand UseUniqueSlot(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_SLOT,
                                        GetVReg(node)));
  }

  // Use register or operand for the node. If a register is chosen, it won't
  // alias any temporary or output registers.
  InstructionOperand UseUnique(Node* node) {
    return Use(node,
               UnallocatedOperand(UnallocatedOperand::NONE, GetVReg(node)));
  }

  // Use a unique register for the node that does not alias any temporary or
  // output registers.
  InstructionOperand UseUniqueRegister(Node* node) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        GetVReg(node)));
  }

  InstructionOperand UseFixed(Node* node, Register reg) {
    return Use(node, UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                        Register::ToAllocationIndex(reg),
                                        GetVReg(node)));
  }

  InstructionOperand UseFixed(Node* node, DoubleRegister reg) {
    return Use(node,
               UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg),
                                  GetVReg(node)));
  }

  InstructionOperand UseImmediate(Node* node) {
    int index = sequence()->AddImmediate(ToConstant(node));
    return ImmediateOperand(index);
  }

  InstructionOperand UseLocation(Node* node, LinkageLocation location,
                                 MachineType type) {
    return Use(node, ToUnallocatedOperand(location, type, GetVReg(node)));
  }

  InstructionOperand TempRegister() {
    return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                              UnallocatedOperand::USED_AT_START,
                              sequence()->NextVirtualRegister());
  }

  InstructionOperand TempDoubleRegister() {
    UnallocatedOperand op = UnallocatedOperand(
        UnallocatedOperand::MUST_HAVE_REGISTER,
        UnallocatedOperand::USED_AT_START, sequence()->NextVirtualRegister());
    sequence()->MarkAsDouble(op.virtual_register());
    return op;
  }

  InstructionOperand TempRegister(Register reg) {
    return UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                              Register::ToAllocationIndex(reg),
                              InstructionOperand::kInvalidVirtualRegister);
  }

  InstructionOperand TempImmediate(int32_t imm) {
    int index = sequence()->AddImmediate(Constant(imm));
    return ImmediateOperand(index);
  }

  InstructionOperand TempLocation(LinkageLocation location, MachineType type) {
    return ToUnallocatedOperand(location, type,
                                sequence()->NextVirtualRegister());
  }

  InstructionOperand Label(BasicBlock* block) {
    int index = sequence()->AddImmediate(
        Constant(RpoNumber::FromInt(block->rpo_number())));
    return ImmediateOperand(index);
  }

 protected:
  InstructionSelector* selector() const { return selector_; }
  InstructionSequence* sequence() const { return selector()->sequence(); }
  Zone* zone() const { return selector()->instruction_zone(); }

 private:
  int GetVReg(Node* node) const { return selector_->GetVirtualRegister(node); }

  static Constant ToConstant(const Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return Constant(OpParameter<int32_t>(node));
      case IrOpcode::kInt64Constant:
        return Constant(OpParameter<int64_t>(node));
      case IrOpcode::kFloat32Constant:
        return Constant(OpParameter<float>(node));
      case IrOpcode::kFloat64Constant:
      case IrOpcode::kNumberConstant:
        return Constant(OpParameter<double>(node));
      case IrOpcode::kExternalConstant:
        return Constant(OpParameter<ExternalReference>(node));
      case IrOpcode::kHeapConstant:
        return Constant(OpParameter<Unique<HeapObject> >(node).handle());
      default:
        break;
    }
    UNREACHABLE();
    return Constant(static_cast<int32_t>(0));
  }

  UnallocatedOperand Define(Node* node, UnallocatedOperand operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_EQ(operand.virtual_register(), GetVReg(node));
    selector()->MarkAsDefined(node);
    return operand;
  }

  UnallocatedOperand Use(Node* node, UnallocatedOperand operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_EQ(operand.virtual_register(), GetVReg(node));
    selector()->MarkAsUsed(node);
    return operand;
  }

  UnallocatedOperand ToUnallocatedOperand(LinkageLocation location,
                                          MachineType type,
                                          int virtual_register) {
    if (location.location_ == LinkageLocation::ANY_REGISTER) {
      // any machine register.
      return UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                virtual_register);
    }
    if (location.location_ < 0) {
      // a location on the caller frame.
      return UnallocatedOperand(UnallocatedOperand::FIXED_SLOT,
                                location.location_, virtual_register);
    }
    if (location.location_ > LinkageLocation::ANY_REGISTER) {
      // a spill location on this (callee) frame.
      return UnallocatedOperand(
          UnallocatedOperand::FIXED_SLOT,
          location.location_ - LinkageLocation::ANY_REGISTER - 1,
          virtual_register);
    }
    // a fixed register.
    if (RepresentationOf(type) == kRepFloat64) {
      return UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                location.location_, virtual_register);
    }
    return UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                              location.location_, virtual_register);
  }

  InstructionSelector* selector_;
};
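
// A minimal usage sketch (not part of the original header): each backend
// derives an architecture-specific generator from OperandGenerator and
// combines it with InstructionSelector::Emit to describe an instruction's
// outputs and inputs. The generator type and opcode below (modeled on a
// backend such as the x64 port) are illustrative, not prescriptive:
//
//   void VisitInt32Add(InstructionSelector* selector, Node* node) {
//     X64OperandGenerator g(selector);
//     selector->Emit(kX64Add32, g.DefineSameAsFirst(node),
//                    g.UseRegister(node->InputAt(0)),
//                    g.Use(node->InputAt(1)));
//   }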


// The flags continuation is a way to combine a branch or a materialization
// of a boolean value with an instruction that sets the flags register.
// The whole instruction is treated as a unit by the register allocator, and
// thus no spills or moves can be introduced between the flags-setting
// instruction and the branch or set it should be combined with.
class FlagsContinuation FINAL {
 public:
  FlagsContinuation() : mode_(kFlags_none) {}

  // Creates a new flags continuation from the given condition and true/false
  // blocks.
  FlagsContinuation(FlagsCondition condition, BasicBlock* true_block,
                    BasicBlock* false_block)
      : mode_(kFlags_branch),
        condition_(condition),
        true_block_(true_block),
        false_block_(false_block) {
    DCHECK_NOT_NULL(true_block);
    DCHECK_NOT_NULL(false_block);
  }

  // Creates a new flags continuation from the given condition and result node.
  FlagsContinuation(FlagsCondition condition, Node* result)
      : mode_(kFlags_set), condition_(condition), result_(result) {
    DCHECK_NOT_NULL(result);
  }

  bool IsNone() const { return mode_ == kFlags_none; }
  bool IsBranch() const { return mode_ == kFlags_branch; }
  bool IsSet() const { return mode_ == kFlags_set; }
  FlagsCondition condition() const {
    DCHECK(!IsNone());
    return condition_;
  }
  Node* result() const {
    DCHECK(IsSet());
    return result_;
  }
  BasicBlock* true_block() const {
    DCHECK(IsBranch());
    return true_block_;
  }
  BasicBlock* false_block() const {
    DCHECK(IsBranch());
    return false_block_;
  }

  void Negate() {
    DCHECK(!IsNone());
    condition_ = NegateFlagsCondition(condition_);
  }

  void Commute() {
    DCHECK(!IsNone());
    switch (condition_) {
      case kEqual:
      case kNotEqual:
      case kOverflow:
      case kNotOverflow:
        return;
      case kSignedLessThan:
        condition_ = kSignedGreaterThan;
        return;
      case kSignedGreaterThanOrEqual:
        condition_ = kSignedLessThanOrEqual;
        return;
      case kSignedLessThanOrEqual:
        condition_ = kSignedGreaterThanOrEqual;
        return;
      case kSignedGreaterThan:
        condition_ = kSignedLessThan;
        return;
      case kUnsignedLessThan:
        condition_ = kUnsignedGreaterThan;
        return;
      case kUnsignedGreaterThanOrEqual:
        condition_ = kUnsignedLessThanOrEqual;
        return;
      case kUnsignedLessThanOrEqual:
        condition_ = kUnsignedGreaterThanOrEqual;
        return;
      case kUnsignedGreaterThan:
        condition_ = kUnsignedLessThan;
        return;
      case kUnorderedEqual:
      case kUnorderedNotEqual:
        return;
    }
    UNREACHABLE();
  }

  void OverwriteAndNegateIfEqual(FlagsCondition condition) {
    bool negate = condition_ == kEqual;
    condition_ = condition;
    if (negate) Negate();
  }

  // Encodes this flags continuation into the given opcode.
  InstructionCode Encode(InstructionCode opcode) {
    opcode |= FlagsModeField::encode(mode_);
    if (mode_ != kFlags_none) {
      opcode |= FlagsConditionField::encode(condition_);
    }
    return opcode;
  }

 private:
  FlagsMode mode_;
  FlagsCondition condition_;
  Node* result_;             // Only valid if mode_ == kFlags_set.
  BasicBlock* true_block_;   // Only valid if mode_ == kFlags_branch.
  BasicBlock* false_block_;  // Only valid if mode_ == kFlags_branch.
};
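
// A minimal sketch of how a backend might fold a compare into this
// continuation (illustrative, not part of the original header; the exact
// helper shape varies per architecture): the condition is encoded into the
// compare opcode, and the branch targets or the result register become extra
// operands of the same instruction, so the register allocator cannot split
// them apart.
//
//   void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
//                     InstructionOperand left, InstructionOperand right,
//                     FlagsContinuation* cont) {
//     OperandGenerator g(selector);
//     opcode = cont->Encode(opcode);
//     if (cont->IsBranch()) {
//       selector->Emit(opcode, g.NoOutput(), left, right,
//                      g.Label(cont->true_block()),
//                      g.Label(cont->false_block()));
//     } else {
//       DCHECK(cont->IsSet());
//       selector->Emit(opcode, g.DefineAsRegister(cont->result()), left,
//                      right);
//     }
//   }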


// An internal helper class for generating the operands to calls.
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
struct CallBuffer {
  CallBuffer(Zone* zone, const CallDescriptor* descriptor,
             FrameStateDescriptor* frame_state);

  const CallDescriptor* descriptor;
  FrameStateDescriptor* frame_state_descriptor;
  NodeVector output_nodes;
  InstructionOperandVector outputs;
  InstructionOperandVector instruction_args;
  NodeVector pushed_nodes;

  size_t input_count() const { return descriptor->InputCount(); }

  size_t frame_state_count() const { return descriptor->FrameStateCount(); }

  size_t frame_state_value_count() const {
    return (frame_state_descriptor == NULL)
               ? 0
               : (frame_state_descriptor->GetTotalSize() +
                  1);  // Include deopt id.
  }
};
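
// Typical use (illustrative, not part of the original header): VisitCall
// constructs a CallBuffer for the call's descriptor, populates it (via the
// selector's call-buffer initialization), pushes the arguments collected in
// pushed_nodes onto the stack, and finally emits the call instruction with
// outputs as its outputs and instruction_args as its inputs.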

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_