A64: Handle a few TODOs.
author alexandre.rames@arm.com <alexandre.rames@arm.com@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 7 Mar 2014 15:20:32 +0000 (15:20 +0000)
committer alexandre.rames@arm.com <alexandre.rames@arm.com@ce2b1a6d-e550-0410-aec6-3dcde31c8c00>
Fri, 7 Mar 2014 15:20:32 +0000 (15:20 +0000)
Notes about a few TODOs handled in this patch:

* In ProfileEntryHookStub::Generate:
    Stubs are always called with relocation.

* In CreateArrayDispatchOneArgument:
    The branches to registers can't be conditional. We could use a jump table, but
    there are only 6 different kinds so it is likely not worth it.

* In Builtins::Generate_StringConstructCode:
    Rename the argc register (x0) after its meaning changes.
    Remove a TODO: using a macro would not make the code clearer.

* In Generate_JSEntryTrampolineHelper:
    Remove the TODO and raise an internal issue to investigate this.

* In Disassembler::SubstituteBranchTargetField:
    Print the target address, but we don't have more info on the target.

R=ulan@chromium.org

Review URL: https://codereview.chromium.org/185793002

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@19724 ce2b1a6d-e550-0410-aec6-3dcde31c8c00

src/a64/builtins-a64.cc
src/a64/code-stubs-a64.cc
src/a64/disasm-a64.cc
src/a64/lithium-codegen-a64.cc
test/cctest/test-disasm-a64.cc

index be70009..aafadd7 100644 (file)
@@ -184,12 +184,13 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   __ Sub(argc, argc, 1);
   __ Claim(argc, kXRegSizeInBytes);
   // jssp now point to args[0], load and drop args[0] + receiver.
-  // TODO(jbramley): Consider adding ClaimAndPoke.
-  __ Ldr(argc, MemOperand(jssp, 2 * kPointerSize, PostIndex));
+  Register arg = argc;
+  __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));
+  argc = NoReg;
 
   Register argument = x2;
   Label not_cached, argument_is_string;
-  __ LookupNumberStringCache(argc,       // Input.
+  __ LookupNumberStringCache(arg,        // Input.
                              argument,   // Result.
                              x10,        // Scratch.
                              x11,        // Scratch.
@@ -237,13 +238,13 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   // if it's a string already before calling the conversion builtin.
   Label convert_argument;
   __ Bind(&not_cached);
-  __ JumpIfSmi(argc, &convert_argument);
+  __ JumpIfSmi(arg, &convert_argument);
 
   // Is it a String?
   __ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
   __ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
   __ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
-  __ Mov(argument, argc);
+  __ Mov(argument, arg);
   __ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
   __ B(&argument_is_string);
 
@@ -253,7 +254,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
   __ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
-    __ Push(argc);
+    __ Push(arg);
     __ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
   }
   __ Pop(function);
@@ -708,9 +709,6 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
     // x28 : JS stack pointer (jssp).
     // x29 : frame pointer (fp).
 
-    // TODO(alexandre): Revisit the MAsm function invocation mechanisms.
-    // Currently there is a mix of statically and dynamically allocated
-    // registers.
     __ Mov(x0, argc);
     if (is_construct) {
       // No type feedback cell is available.
index a3aad78..2b472da 100644 (file)
@@ -4640,10 +4640,9 @@ void ArrayPushStub::Generate(MacroAssembler* masm) {
   __ Str(argument, MemOperand(end_elements));
   // Fill the rest with holes.
   __ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
-  for (int i = 1; i < kAllocationDelta; i++) {
-    // TODO(all): Try to use stp here.
-    __ Str(x10, MemOperand(end_elements, i * kPointerSize));
-  }
+  ASSERT(kAllocationDelta == 4);
+  __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize));
+  __ Stp(x10, x10, MemOperand(end_elements, 3 * kPointerSize));
 
   // Update elements' and array's sizes.
   __ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
@@ -4970,7 +4969,6 @@ void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
   MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
   // The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
   // a "Push lr" instruction, followed by a call.
-  // TODO(jbramley): Verify that this call is always made with relocation.
   static const int kReturnAddressDistanceFromFunctionStart =
       Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
 
@@ -5394,8 +5392,6 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
     for (int i = 0; i <= last_index; ++i) {
       Label next;
       ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
-      // TODO(jbramley): Is this the best way to handle this? Can we make the
-      // tail calls conditional, rather than hopping over each one?
       __ CompareAndBranch(kind, candidate_kind, ne, &next);
       ArraySingleArgumentConstructorStub stub(candidate_kind);
       __ TailCallStub(&stub);
@@ -5725,16 +5721,13 @@ void CallApiFunctionStub::Generate(MacroAssembler* masm) {
   // x0 = FunctionCallbackInfo&
   // Arguments is after the return address.
   __ Add(x0, masm->StackPointer(), 1 * kPointerSize);
-  // FunctionCallbackInfo::implicit_args_
-  __ Str(args, MemOperand(x0, 0 * kPointerSize));
-  // FunctionCallbackInfo::values_
+  // FunctionCallbackInfo::implicit_args_ and FunctionCallbackInfo::values_
   __ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
-  __ Str(x10, MemOperand(x0, 1 * kPointerSize));
-  // FunctionCallbackInfo::length_ = argc
-  __ Mov(x10, argc);
-  __ Str(x10, MemOperand(x0, 2 * kPointerSize));
+  __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize));
+  // FunctionCallbackInfo::length_ = argc and
   // FunctionCallbackInfo::is_construct_call = 0
-  __ Str(xzr, MemOperand(x0, 3 * kPointerSize));
+  __ Mov(x10, argc);
+  __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize));
 
   const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
   Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
index b5e5fdd..970be11 100644 (file)
@@ -1606,8 +1606,8 @@ int Disassembler::SubstitutePCRelAddressField(Instruction* instr,
     offset = -offset;
     sign = '-';
   }
-  // TODO(jbramley): Can we print the target address here?
-  AppendToOutput("#%c0x%x", sign, offset);
+  STATIC_ASSERT(sizeof(*instr) == 1);
+  AppendToOutput("#%c0x%x (addr %p)", sign, offset, instr + offset);
   return 13;
 }
 
@@ -1634,8 +1634,8 @@ int Disassembler::SubstituteBranchTargetField(Instruction* instr,
     offset = -offset;
     sign = '-';
   }
-  // TODO(mcapewel): look up pc + offset in label table.
-  AppendToOutput("#%c0x%" PRIx64, sign, offset);
+  STATIC_ASSERT(sizeof(*instr) == 1);
+  AppendToOutput("#%c0x%" PRIx64 " (addr %p)", sign, offset, instr + offset);
   return 8;
 }
 
index d3b3314..3d2e6e4 100644 (file)
@@ -3743,12 +3743,7 @@ void LCodeGen::DoMathAbsTagged(LMathAbsTagged* instr) {
   // case in DoMathAbs, except that it operates on 64-bit values.
   STATIC_ASSERT((kSmiValueSize == 32) && (kSmiShift == 32) && (kSmiTag == 0));
 
-  // TODO(jbramley): We can't use JumpIfNotSmi here because the tbz it uses
-  // doesn't always have enough range. Consider making a variant of it, or a
-  // TestIsSmi helper.
-  STATIC_ASSERT(kSmiTag == 0);
-  __ Tst(input, kSmiTagMask);
-  __ B(ne, deferred->entry());
+  __ JumpIfNotSmi(input, deferred->entry());
 
   __ Abs(result, input, NULL, &done);
 
index d202bcf..e35455c 100644 (file)
@@ -782,14 +782,14 @@ TEST_(dp_2_source) {
 TEST_(adr) {
   SET_UP();
 
-  COMPARE(adr(x0, 0), "adr x0, #+0x0");
-  COMPARE(adr(x1, 1), "adr x1, #+0x1");
-  COMPARE(adr(x2, -1), "adr x2, #-0x1");
-  COMPARE(adr(x3, 4), "adr x3, #+0x4");
-  COMPARE(adr(x4, -4), "adr x4, #-0x4");
-  COMPARE(adr(x5, 0x000fffff), "adr x5, #+0xfffff");
-  COMPARE(adr(x6, -0x00100000), "adr x6, #-0x100000");
-  COMPARE(adr(xzr, 0), "adr xzr, #+0x0");
+  COMPARE_PREFIX(adr(x0, 0), "adr x0, #+0x0");
+  COMPARE_PREFIX(adr(x1, 1), "adr x1, #+0x1");
+  COMPARE_PREFIX(adr(x2, -1), "adr x2, #-0x1");
+  COMPARE_PREFIX(adr(x3, 4), "adr x3, #+0x4");
+  COMPARE_PREFIX(adr(x4, -4), "adr x4, #-0x4");
+  COMPARE_PREFIX(adr(x5, 0x000fffff), "adr x5, #+0xfffff");
+  COMPARE_PREFIX(adr(x6, -0x00100000), "adr x6, #-0x100000");
+  COMPARE_PREFIX(adr(xzr, 0), "adr xzr, #+0x0");
 
   CLEANUP();
 }
@@ -799,30 +799,30 @@ TEST_(branch) {
   SET_UP();
 
   #define INST_OFF(x) ((x) >> kInstructionSizeLog2)
-  COMPARE(b(INST_OFF(0x4)), "b #+0x4");
-  COMPARE(b(INST_OFF(-0x4)), "b #-0x4");
-  COMPARE(b(INST_OFF(0x7fffffc)), "b #+0x7fffffc");
-  COMPARE(b(INST_OFF(-0x8000000)), "b #-0x8000000");
-  COMPARE(b(INST_OFF(0xffffc), eq), "b.eq #+0xffffc");
-  COMPARE(b(INST_OFF(-0x100000), mi), "b.mi #-0x100000");
-  COMPARE(bl(INST_OFF(0x4)), "bl #+0x4");
-  COMPARE(bl(INST_OFF(-0x4)), "bl #-0x4");
-  COMPARE(bl(INST_OFF(0xffffc)), "bl #+0xffffc");
-  COMPARE(bl(INST_OFF(-0x100000)), "bl #-0x100000");
-  COMPARE(cbz(w0, INST_OFF(0xffffc)), "cbz w0, #+0xffffc");
-  COMPARE(cbz(x1, INST_OFF(-0x100000)), "cbz x1, #-0x100000");
-  COMPARE(cbnz(w2, INST_OFF(0xffffc)), "cbnz w2, #+0xffffc");
-  COMPARE(cbnz(x3, INST_OFF(-0x100000)), "cbnz x3, #-0x100000");
-  COMPARE(tbz(w4, 0, INST_OFF(0x7ffc)), "tbz w4, #0, #+0x7ffc");
-  COMPARE(tbz(x5, 63, INST_OFF(-0x8000)), "tbz x5, #63, #-0x8000");
-  COMPARE(tbz(w6, 31, INST_OFF(0)), "tbz w6, #31, #+0x0");
-  COMPARE(tbz(x7, 31, INST_OFF(0x4)), "tbz w7, #31, #+0x4");
-  COMPARE(tbz(x8, 32, INST_OFF(0x8)), "tbz x8, #32, #+0x8");
-  COMPARE(tbnz(w8, 0, INST_OFF(0x7ffc)), "tbnz w8, #0, #+0x7ffc");
-  COMPARE(tbnz(x9, 63, INST_OFF(-0x8000)), "tbnz x9, #63, #-0x8000");
-  COMPARE(tbnz(w10, 31, INST_OFF(0)), "tbnz w10, #31, #+0x0");
-  COMPARE(tbnz(x11, 31, INST_OFF(0x4)), "tbnz w11, #31, #+0x4");
-  COMPARE(tbnz(x12, 32, INST_OFF(0x8)), "tbnz x12, #32, #+0x8");
+  COMPARE_PREFIX(b(INST_OFF(0x4)), "b #+0x4");
+  COMPARE_PREFIX(b(INST_OFF(-0x4)), "b #-0x4");
+  COMPARE_PREFIX(b(INST_OFF(0x7fffffc)), "b #+0x7fffffc");
+  COMPARE_PREFIX(b(INST_OFF(-0x8000000)), "b #-0x8000000");
+  COMPARE_PREFIX(b(INST_OFF(0xffffc), eq), "b.eq #+0xffffc");
+  COMPARE_PREFIX(b(INST_OFF(-0x100000), mi), "b.mi #-0x100000");
+  COMPARE_PREFIX(bl(INST_OFF(0x4)), "bl #+0x4");
+  COMPARE_PREFIX(bl(INST_OFF(-0x4)), "bl #-0x4");
+  COMPARE_PREFIX(bl(INST_OFF(0xffffc)), "bl #+0xffffc");
+  COMPARE_PREFIX(bl(INST_OFF(-0x100000)), "bl #-0x100000");
+  COMPARE_PREFIX(cbz(w0, INST_OFF(0xffffc)), "cbz w0, #+0xffffc");
+  COMPARE_PREFIX(cbz(x1, INST_OFF(-0x100000)), "cbz x1, #-0x100000");
+  COMPARE_PREFIX(cbnz(w2, INST_OFF(0xffffc)), "cbnz w2, #+0xffffc");
+  COMPARE_PREFIX(cbnz(x3, INST_OFF(-0x100000)), "cbnz x3, #-0x100000");
+  COMPARE_PREFIX(tbz(w4, 0, INST_OFF(0x7ffc)), "tbz w4, #0, #+0x7ffc");
+  COMPARE_PREFIX(tbz(x5, 63, INST_OFF(-0x8000)), "tbz x5, #63, #-0x8000");
+  COMPARE_PREFIX(tbz(w6, 31, INST_OFF(0)), "tbz w6, #31, #+0x0");
+  COMPARE_PREFIX(tbz(x7, 31, INST_OFF(0x4)), "tbz w7, #31, #+0x4");
+  COMPARE_PREFIX(tbz(x8, 32, INST_OFF(0x8)), "tbz x8, #32, #+0x8");
+  COMPARE_PREFIX(tbnz(w8, 0, INST_OFF(0x7ffc)), "tbnz w8, #0, #+0x7ffc");
+  COMPARE_PREFIX(tbnz(x9, 63, INST_OFF(-0x8000)), "tbnz x9, #63, #-0x8000");
+  COMPARE_PREFIX(tbnz(w10, 31, INST_OFF(0)), "tbnz w10, #31, #+0x0");
+  COMPARE_PREFIX(tbnz(x11, 31, INST_OFF(0x4)), "tbnz w11, #31, #+0x4");
+  COMPARE_PREFIX(tbnz(x12, 32, INST_OFF(0x8)), "tbnz x12, #32, #+0x8");
   COMPARE(br(x0), "br x0");
   COMPARE(blr(x1), "blr x1");
   COMPARE(ret(x2), "ret x2");