[ia32] Use more efficient encoding when loading integer constants.
author    bmeurer@chromium.org <bmeurer@chromium.org>
          Thu, 9 Oct 2014 12:50:13 +0000 (12:50 +0000)
committer bmeurer@chromium.org <bmeurer@chromium.org>
          Thu, 9 Oct 2014 12:50:13 +0000 (12:50 +0000)
R=titzer@chromium.org

Review URL: https://codereview.chromium.org/642053002

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@24494 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
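
Background (not part of the patch): the saving comes from the MacroAssembler Move helper, which can pick a shorter instruction than a plain mov when the immediate is zero. A minimal sketch of the idea, assuming a register overload of roughly this shape (the real helper lives in the ia32 macro-assembler and may differ in detail; the Operand overloads used below are not sketched here):

    // Sketch only (assumption, not taken from this patch): approximate shape
    // of MacroAssembler::Move for a register destination on ia32.
    void MacroAssembler::Move(Register dst, const Immediate& x) {
      if (x.is_zero()) {
        xor_(dst, dst);   // Zeroing via xor: 2-byte encoding, but clobbers EFLAGS.
      } else {
        mov(dst, x);      // General case: mov r32, imm32 (5 bytes).
      }
    }

Zeroing a register with xor is a 2-byte encoding versus 5 bytes for mov r32, imm32. The trade-off is that xor clobbers EFLAGS, so the helper is only safe where the flags are dead, as in the changed paths in the diff below, which either jump away immediately or have already consumed the condition.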

src/compiler/ia32/code-generator-ia32.cc

index 84933de..0c71bf3 100644
@@ -249,7 +249,7 @@ void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
       __ idiv(i.InputOperand(1));
       break;
     case kIA32Udiv:
-      __ xor_(edx, edx);
+      __ Move(edx, Immediate(0));
       __ div(i.InputOperand(1));
       break;
     case kIA32Not:
@@ -552,7 +552,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
   switch (condition) {
     case kUnorderedEqual:
       __ j(parity_odd, &check, Label::kNear);
-      __ mov(reg, Immediate(0));
+      __ Move(reg, Immediate(0));
       __ jmp(&done, Label::kNear);
     // Fall through.
     case kEqual:
@@ -580,7 +580,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
       break;
     case kUnorderedLessThan:
       __ j(parity_odd, &check, Label::kNear);
-      __ mov(reg, Immediate(0));
+      __ Move(reg, Immediate(0));
       __ jmp(&done, Label::kNear);
     // Fall through.
     case kUnsignedLessThan:
@@ -596,7 +596,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
       break;
     case kUnorderedLessThanOrEqual:
       __ j(parity_odd, &check, Label::kNear);
-      __ mov(reg, Immediate(0));
+      __ Move(reg, Immediate(0));
       __ jmp(&done, Label::kNear);
     // Fall through.
     case kUnsignedLessThanOrEqual:
@@ -626,7 +626,7 @@ void CodeGenerator::AssembleArchBoolean(Instruction* instr,
     // Emit a branch to set a register to either 1 or 0.
     Label set;
     __ j(cc, &set, Label::kNear);
-    __ mov(reg, Immediate(0));
+    __ Move(reg, Immediate(0));
     __ jmp(&done, Label::kNear);
     __ bind(&set);
     __ mov(reg, Immediate(1));
@@ -899,10 +899,10 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
       }
     } else if (destination->IsRegister()) {
       Register dst = g.ToRegister(destination);
-      __ mov(dst, g.ToImmediate(source));
+      __ Move(dst, g.ToImmediate(source));
     } else if (destination->IsStackSlot()) {
       Operand dst = g.ToOperand(destination);
-      __ mov(dst, g.ToImmediate(source));
+      __ Move(dst, g.ToImmediate(source));
     } else if (src_constant.type() == Constant::kFloat32) {
       // TODO(turbofan): Can we do better here?
       uint32_t src = bit_cast<uint32_t>(src_constant.ToFloat32());
@@ -912,7 +912,7 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
       } else {
         DCHECK(destination->IsDoubleStackSlot());
         Operand dst = g.ToOperand(destination);
-        __ mov(dst, Immediate(src));
+        __ Move(dst, Immediate(src));
       }
     } else {
       DCHECK_EQ(Constant::kFloat64, src_constant.type());
@@ -926,8 +926,8 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
         DCHECK(destination->IsDoubleStackSlot());
         Operand dst0 = g.ToOperand(destination);
         Operand dst1 = g.HighOperand(destination);
-        __ mov(dst0, Immediate(lower));
-        __ mov(dst1, Immediate(upper));
+        __ Move(dst0, Immediate(lower));
+        __ Move(dst1, Immediate(upper));
       }
     }
   } else if (source->IsDoubleRegister()) {