deps: update v8 to 4.3.61.21
deps/v8/src/compiler/mips64/instruction-selector-mips64.cc
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/bits.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE_UNIMPL() \
  PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)

#define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)


// Adds Mips-specific methods for generating InstructionOperands.
class Mips64OperandGenerator FINAL : public OperandGenerator {
 public:
  explicit Mips64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand UseOperand(Node* node, InstructionCode opcode) {
    if (CanBeImmediate(node, opcode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    int64_t value;
    if (node->opcode() == IrOpcode::kInt32Constant)
      value = OpParameter<int32_t>(node);
    else if (node->opcode() == IrOpcode::kInt64Constant)
      value = OpParameter<int64_t>(node);
    else
      return false;
    switch (ArchOpcodeField::decode(opcode)) {
      case kMips64Shl:
      case kMips64Sar:
      case kMips64Shr:
        return is_uint5(value);
      case kMips64Dshl:
      case kMips64Dsar:
      case kMips64Dshr:
        return is_uint6(value);
      case kMips64Xor:
        return is_uint16(value);
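      // FP64 loads/stores may be split into two word-sized accesses, so
      // (presumably) both |value| and |value + kIntSize| need to fit the
      // signed 16-bit offset field.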
      case kMips64Ldc1:
      case kMips64Sdc1:
        return is_int16(value + kIntSize);
      default:
        return is_int16(value);
    }
  }

 private:
  bool ImmediateFitsAddrMode1Instruction(int32_t imm) const {
    TRACE_UNIMPL();
    return false;
  }
};


static void VisitRR(InstructionSelector* selector, ArchOpcode opcode,
                    Node* node) {
  Mips64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}


static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode,
                     Node* node) {
  Mips64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseRegister(node->InputAt(1)));
}


static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
                     Node* node) {
  Mips64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseOperand(node->InputAt(1), opcode));
}


static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), opcode);

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}


static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}


void InstructionSelector::VisitLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node));
  MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node));
  Mips64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kMips64Lwc1;
      break;
    case kRepFloat64:
      opcode = kMips64Ldc1;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      opcode = typ == kTypeUint32 ? kMips64Lbu : kMips64Lb;
      break;
    case kRepWord16:
      opcode = typ == kTypeUint32 ? kMips64Lhu : kMips64Lh;
      break;
    case kRepWord32:
      opcode = kMips64Lw;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord64:
      opcode = kMips64Ld;
      break;
    default:
      UNREACHABLE();
      return;
  }

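  // Fold the index into the load's 16-bit immediate field when it fits;
  // otherwise compute base + index into a temporary register and load with a
  // zero offset.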
  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
  } else {
    InstructionOperand addr_reg = g.TempRegister();
    Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
         g.UseRegister(index), g.UseRegister(base));
    // Emit desired load opcode, using temp addr_reg.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), addr_reg, g.TempImmediate(0));
  }
}


void InstructionSelector::VisitStore(Node* node) {
  Mips64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node);
  MachineType rep = RepresentationOf(store_rep.machine_type());
  if (store_rep.write_barrier_kind() == kFullWriteBarrier) {
    DCHECK(rep == kRepTagged);
    // TODO(dcarney): refactor RecordWrite function to take temp registers
    //                and pass them here instead of using fixed regs
    // TODO(dcarney): handle immediate indices.
    InstructionOperand temps[] = {g.TempRegister(t1), g.TempRegister(t2)};
    Emit(kMips64StoreWriteBarrier, g.NoOutput(), g.UseFixed(base, t0),
         g.UseFixed(index, t1), g.UseFixed(value, t2), arraysize(temps), temps);
    return;
  }
  DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind());

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kMips64Swc1;
      break;
    case kRepFloat64:
      opcode = kMips64Sdc1;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      opcode = kMips64Sb;
      break;
    case kRepWord16:
      opcode = kMips64Sh;
      break;
    case kRepWord32:
      opcode = kMips64Sw;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord64:
      opcode = kMips64Sd;
      break;
    default:
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
         g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
  } else {
    InstructionOperand addr_reg = g.TempRegister();
    Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
         g.UseRegister(index), g.UseRegister(base));
    // Emit desired store opcode, using temp addr_reg.
    Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
         addr_reg, g.TempImmediate(0), g.UseRegister(value));
  }
}


void InstructionSelector::VisitWord32And(Node* node) {
  VisitBinop(this, node, kMips64And);
}


void InstructionSelector::VisitWord64And(Node* node) {
  VisitBinop(this, node, kMips64And);
}


void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kMips64Or);
}


void InstructionSelector::VisitWord64Or(Node* node) {
  VisitBinop(this, node, kMips64Or);
}


void InstructionSelector::VisitWord32Xor(Node* node) {
  VisitBinop(this, node, kMips64Xor);
}


void InstructionSelector::VisitWord64Xor(Node* node) {
  VisitBinop(this, node, kMips64Xor);
}


void InstructionSelector::VisitWord32Shl(Node* node) {
  VisitRRO(this, kMips64Shl, node);
}


void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitRRO(this, kMips64Shr, node);
}


void InstructionSelector::VisitWord32Sar(Node* node) {
  VisitRRO(this, kMips64Sar, node);
}


void InstructionSelector::VisitWord64Shl(Node* node) {
  VisitRRO(this, kMips64Dshl, node);
}


void InstructionSelector::VisitWord64Shr(Node* node) {
  VisitRRO(this, kMips64Dshr, node);
}


void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kMips64Dsar, node);
}


void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kMips64Ror, node);
}


void InstructionSelector::VisitWord32Clz(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Clz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kMips64Dror, node);
}


void InstructionSelector::VisitInt32Add(Node* node) {
  Mips64OperandGenerator g(this);
  // TODO(plind): Consider multiply & add optimization from arm port.
  VisitBinop(this, node, kMips64Add);
}


void InstructionSelector::VisitInt64Add(Node* node) {
  Mips64OperandGenerator g(this);
  // TODO(plind): Consider multiply & add optimization from arm port.
  VisitBinop(this, node, kMips64Dadd);
}


void InstructionSelector::VisitInt32Sub(Node* node) {
  VisitBinop(this, node, kMips64Sub);
}


void InstructionSelector::VisitInt64Sub(Node* node) {
  VisitBinop(this, node, kMips64Dsub);
}


void InstructionSelector::VisitInt32Mul(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
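  // Strength-reduce multiplication by a positive constant of the form 2^k,
  // 2^k + 1 or 2^k - 1 into a shift, or a shift plus an add/sub.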
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = m.right().Value();
    if (base::bits::IsPowerOfTwo32(value)) {
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value)));
      return;
    }
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      Emit(kMips64Add | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
      return;
    }
    if (base::bits::IsPowerOfTwo32(value + 1)) {
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      Emit(kMips64Sub | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
      return;
    }
  }
  Emit(kMips64Mul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt32MulHigh(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64MulHigh, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  Mips64OperandGenerator g(this);
  InstructionOperand const dmul_operand = g.TempRegister();
  Emit(kMips64MulHighU, dmul_operand, g.UseRegister(node->InputAt(0)),
       g.UseRegister(node->InputAt(1)));
  Emit(kMips64Ext, g.DefineAsRegister(node), dmul_operand, g.TempImmediate(0),
       g.TempImmediate(32));
}


void InstructionSelector::VisitInt64Mul(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(dusmil): Add optimization for shifts larger than 32.
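  // As in the 32-bit case, strength-reduce multiplication by 2^k, 2^k + 1 and
  // 2^k - 1 (only for shift amounts below 32; see the TODO above).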
  if (m.right().HasValue() && m.right().Value() > 0) {
    int64_t value = m.right().Value();
    if (base::bits::IsPowerOfTwo32(value)) {
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value)));
      return;
    }
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      Emit(kMips64Dadd | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
      return;
    }
    if (base::bits::IsPowerOfTwo32(value + 1)) {
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      Emit(kMips64Dsub | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
      return;
    }
  }
  Emit(kMips64Dmul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt32Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64Div, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint32Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64DivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64Mod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64ModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt64Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64Ddiv, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint64Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64DdivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64Dmod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64DmodU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64CvtDS, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64CvtDW, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64CvtDUw, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64TruncWD, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64TruncUwD, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  Mips64OperandGenerator g(this);
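  // On MIPS64 a 32-bit shift (here by zero) sign-extends its result into the
  // full 64-bit register, which is exactly the required conversion.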
  Emit(kMips64Shl, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0));
}


void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  Mips64OperandGenerator g(this);
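  // Dext with position 0 and size 32 zero-extends the low word into the full
  // 64-bit register.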
  Emit(kMips64Dext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0), g.TempImmediate(32));
}


void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Ext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0), g.TempImmediate(32));
}


void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64CvtSD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kMips64AddD, node);
}


void InstructionSelector::VisitFloat64Sub(Node* node) {
  Mips64OperandGenerator g(this);
  Float64BinopMatcher m(node);
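  // Recognize -0.0 - RoundDown(-0.0 - x) and emit it as a single RoundUp(x),
  // using the identity ceil(x) == -floor(-x).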
  if (m.left().IsMinusZero() && m.right().IsFloat64RoundDown() &&
      CanCover(m.node(), m.right().node())) {
    if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
        CanCover(m.right().node(), m.right().InputAt(0))) {
      Float64BinopMatcher mright0(m.right().InputAt(0));
      if (mright0.left().IsMinusZero()) {
        Emit(kMips64Float64RoundUp, g.DefineAsRegister(node),
             g.UseRegister(mright0.right().node()));
        return;
      }
    }
  }
  VisitRRR(this, kMips64SubD, node);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kMips64MulD, node);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kMips64DivD, node);
}


void InstructionSelector::VisitFloat64Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64ModD, g.DefineAsFixed(node, f0),
       g.UseFixed(node->InputAt(0), f12),
       g.UseFixed(node->InputAt(1), f14))->MarkAsCall();
}


void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64SqrtD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kMips64Float64RoundDown, node);
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kMips64Float64RoundTruncate, node);
}


void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitCall(Node* node, BasicBlock* handler) {
  Mips64OperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);

  FrameStateDescriptor* frame_state_descriptor = NULL;
  if (descriptor->NeedsFrameState()) {
    frame_state_descriptor =
        GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
  }

  CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

  // Compute InstructionOperands for inputs and outputs.
  InitializeCallBuffer(node, &buffer, true, false);

  int push_count = buffer.pushed_nodes.size();
  if (push_count > 0) {
    Emit(kMips64StackClaim | MiscField::encode(push_count), g.NoOutput());
  }
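  // Store the pushed arguments into the claimed stack slots, last node into
  // the highest slot.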
  int slot = buffer.pushed_nodes.size() - 1;
  for (auto i = buffer.pushed_nodes.rbegin(); i != buffer.pushed_nodes.rend();
       ++i) {
    Emit(kMips64StoreToStackSlot | MiscField::encode(slot), g.NoOutput(),
         g.UseRegister(*i));
    slot--;
  }

  // Pass label of exception handler block.
  CallDescriptor::Flags flags = descriptor->flags();
  if (handler != nullptr) {
    flags |= CallDescriptor::kHasExceptionHandler;
    buffer.instruction_args.push_back(g.Label(handler));
  }

  // Select the appropriate opcode based on the call type.
  InstructionCode opcode;
  switch (descriptor->kind()) {
    case CallDescriptor::kCallCodeObject: {
      opcode = kArchCallCodeObject;
      break;
    }
    case CallDescriptor::kCallJSFunction:
      opcode = kArchCallJSFunction;
      break;
    default:
      UNREACHABLE();
      return;
  }
  opcode |= MiscField::encode(flags);

  // Emit the call instruction.
  Instruction* call_instr =
      Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
           buffer.instruction_args.size(), &buffer.instruction_args.front());

  call_instr->MarkAsCall();
}


void InstructionSelector::VisitCheckedLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  MachineType typ = TypeOf(OpParameter<MachineType>(node));
  Mips64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case kRepWord16:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case kRepWord32:
      opcode = kCheckedLoadWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
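  // The offset is used as an immediate when it fits. The length is only used
  // as an immediate when the offset could not be, presumably so the bounds
  // check always compares a register against the other operand.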
  InstructionOperand offset_operand = g.CanBeImmediate(offset, opcode)
                                          ? g.UseImmediate(offset)
                                          : g.UseRegister(offset);

  InstructionOperand length_operand = (!g.CanBeImmediate(offset, opcode))
                                          ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI),
       g.DefineAsRegister(node), offset_operand, length_operand,
       g.UseRegister(buffer));
}


void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  Mips64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = kCheckedStoreWord8;
      break;
    case kRepWord16:
      opcode = kCheckedStoreWord16;
      break;
    case kRepWord32:
      opcode = kCheckedStoreWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.CanBeImmediate(offset, opcode)
                                          ? g.UseImmediate(offset)
                                          : g.UseRegister(offset);

  InstructionOperand length_operand = (!g.CanBeImmediate(offset, opcode))
                                          ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
       offset_operand, length_operand, g.UseRegister(value),
       g.UseRegister(buffer));
}


namespace {

// Shared routine for multiple compare operations.
static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                         InstructionOperand left, InstructionOperand right,
                         FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple float compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kMips64CmpD, g.UseRegister(left), g.UseRegister(right),
               cont);
}


// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative) {
  Mips64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, opcode)) {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
                 cont);
  } else if (g.CanBeImmediate(left, opcode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
                 cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}


void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kMips64Cmp, cont, false);
}


void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kMips64Cmp, cont, false);
}

}  // namespace


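// Compares |value| against zero and emits the continuation (branch or
// flag-setting result).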
void EmitWordCompareZero(InstructionSelector* selector, Node* value,
                         FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  InstructionCode opcode = cont->Encode(kMips64Cmp);
  InstructionOperand const value_operand = g.UseRegister(value);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), value_operand, g.TempImmediate(0),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   g.TempImmediate(0));
  }
}


// Shared routine for word comparisons against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
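  // Try to fold the comparison (or overflow projection) producing |value| into
  // the continuation, as long as |value| is covered by |user|.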
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord64Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(selector, value, cont);
      }
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either NULL, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == NULL || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kMips64Dadd, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kMips64Dsub, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kWord32And:
      case IrOpcode::kWord64And:
        return VisitWordCompare(selector, value, kMips64Tst, cont, true);
      default:
        break;
    }
    break;
  }

  // The continuation could not be combined with a compare; emit a compare
  // against zero instead.
  EmitWordCompareZero(selector, value, cont);
}


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}


void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  Mips64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 10 + 2 * sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 2 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
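  // Use a jump table when its estimated cost (space plus 3x time) is no worse
  // than a chain of conditional branches, and rebasing by min_value cannot
  // overflow.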
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kMips64Sub, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }

  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kMips64Dadd, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kMips64Dadd, &cont);
}


void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kMips64Dsub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kMips64Dsub, &cont);
}


void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }

  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Float64ExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Float64ExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  Mips64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kMips64Float64InsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  Mips64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kMips64Float64InsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}


// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat64RoundTruncate;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8