__ Sub(argc, argc, 1);
__ Claim(argc, kXRegSizeInBytes);
// jssp now points to args[0]; load and drop args[0] + receiver.
- // TODO(jbramley): Consider adding ClaimAndPoke.
- __ Ldr(argc, MemOperand(jssp, 2 * kPointerSize, PostIndex));
+ Register arg = argc;
+ __ Ldr(arg, MemOperand(jssp, 2 * kPointerSize, PostIndex));
+ argc = NoReg;
Register argument = x2;
Label not_cached, argument_is_string;
- __ LookupNumberStringCache(argc, // Input.
+ __ LookupNumberStringCache(arg, // Input.
argument, // Result.
x10, // Scratch.
x11, // Scratch.
// if it's a string already before calling the conversion builtin.
Label convert_argument;
__ Bind(&not_cached);
- __ JumpIfSmi(argc, &convert_argument);
+ __ JumpIfSmi(arg, &convert_argument);
// Is it a String?
__ Ldr(x10, FieldMemOperand(x0, HeapObject::kMapOffset));
__ Ldrb(x11, FieldMemOperand(x10, Map::kInstanceTypeOffset));
__ Tbnz(x11, MaskToBit(kIsNotStringMask), &convert_argument);
- __ Mov(argument, argc);
+ __ Mov(argument, arg);
__ IncrementCounter(counters->string_ctor_string_value(), 1, x10, x11);
__ B(&argument_is_string);
__ IncrementCounter(counters->string_ctor_conversions(), 1, x10, x11);
{
FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(argc);
+ __ Push(arg);
__ InvokeBuiltin(Builtins::TO_STRING, CALL_FUNCTION);
}
__ Pop(function);
// x28 : JS stack pointer (jssp).
// x29 : frame pointer (fp).
- // TODO(alexandre): Revisit the MAsm function invocation mechanisms.
- // Currently there is a mix of statically and dynamically allocated
- // registers.
__ Mov(x0, argc);
if (is_construct) {
// No type feedback cell is available.
__ Str(argument, MemOperand(end_elements));
// Fill the rest with holes.
__ LoadRoot(x10, Heap::kTheHoleValueRootIndex);
- for (int i = 1; i < kAllocationDelta; i++) {
- // TODO(all): Try to use stp here.
- __ Str(x10, MemOperand(end_elements, i * kPointerSize));
- }
+ ASSERT(kAllocationDelta == 4);
+ // The replaced loop stored holes at offsets 1..3 only (the new element was
+ // already stored at offset 0), so use one Stp (offsets 1,2) plus one Str
+ // (offset 3); a second Stp would also write offset 4, past the new slots.
+ __ Stp(x10, x10, MemOperand(end_elements, 1 * kPointerSize));
+ __ Str(x10, MemOperand(end_elements, 3 * kPointerSize));
// Update elements' and array's sizes.
__ Str(length, FieldMemOperand(receiver, JSArray::kLengthOffset));
MacroAssembler::NoUseRealAbortsScope no_use_real_aborts(masm);
// The entry hook is a "BumpSystemStackPointer" instruction (sub), followed by
// a "Push lr" instruction, followed by a call.
- // TODO(jbramley): Verify that this call is always made with relocation.
static const int kReturnAddressDistanceFromFunctionStart =
Assembler::kCallSizeWithRelocation + (2 * kInstructionSize);
for (int i = 0; i <= last_index; ++i) {
Label next;
ElementsKind candidate_kind = GetFastElementsKindFromSequenceIndex(i);
- // TODO(jbramley): Is this the best way to handle this? Can we make the
- // tail calls conditional, rather than hopping over each one?
__ CompareAndBranch(kind, candidate_kind, ne, &next);
ArraySingleArgumentConstructorStub stub(candidate_kind);
__ TailCallStub(&stub);
// x0 = FunctionCallbackInfo&
// Arguments is after the return address.
__ Add(x0, masm->StackPointer(), 1 * kPointerSize);
- // FunctionCallbackInfo::implicit_args_
- __ Str(args, MemOperand(x0, 0 * kPointerSize));
- // FunctionCallbackInfo::values_
+ // FunctionCallbackInfo::implicit_args_ and FunctionCallbackInfo::values_
__ Add(x10, args, Operand((FCA::kArgsLength - 1 + argc) * kPointerSize));
- __ Str(x10, MemOperand(x0, 1 * kPointerSize));
- // FunctionCallbackInfo::length_ = argc
- __ Mov(x10, argc);
- __ Str(x10, MemOperand(x0, 2 * kPointerSize));
+ __ Stp(args, x10, MemOperand(x0, 0 * kPointerSize));
+ // FunctionCallbackInfo::length_ = argc and
// FunctionCallbackInfo::is_construct_call = 0
- __ Str(xzr, MemOperand(x0, 3 * kPointerSize));
+ __ Mov(x10, argc);
+ __ Stp(x10, xzr, MemOperand(x0, 2 * kPointerSize));
const int kStackUnwindSpace = argc + FCA::kArgsLength + 1;
Address thunk_address = FUNCTION_ADDR(&InvokeFunctionCallback);
TEST_(adr) {
SET_UP();
- COMPARE(adr(x0, 0), "adr x0, #+0x0");
- COMPARE(adr(x1, 1), "adr x1, #+0x1");
- COMPARE(adr(x2, -1), "adr x2, #-0x1");
- COMPARE(adr(x3, 4), "adr x3, #+0x4");
- COMPARE(adr(x4, -4), "adr x4, #-0x4");
- COMPARE(adr(x5, 0x000fffff), "adr x5, #+0xfffff");
- COMPARE(adr(x6, -0x00100000), "adr x6, #-0x100000");
- COMPARE(adr(xzr, 0), "adr xzr, #+0x0");
+ COMPARE_PREFIX(adr(x0, 0), "adr x0, #+0x0");
+ COMPARE_PREFIX(adr(x1, 1), "adr x1, #+0x1");
+ COMPARE_PREFIX(adr(x2, -1), "adr x2, #-0x1");
+ COMPARE_PREFIX(adr(x3, 4), "adr x3, #+0x4");
+ COMPARE_PREFIX(adr(x4, -4), "adr x4, #-0x4");
+ COMPARE_PREFIX(adr(x5, 0x000fffff), "adr x5, #+0xfffff");
+ COMPARE_PREFIX(adr(x6, -0x00100000), "adr x6, #-0x100000");
+ COMPARE_PREFIX(adr(xzr, 0), "adr xzr, #+0x0");
CLEANUP();
}
SET_UP();
#define INST_OFF(x) ((x) >> kInstructionSizeLog2)
- COMPARE(b(INST_OFF(0x4)), "b #+0x4");
- COMPARE(b(INST_OFF(-0x4)), "b #-0x4");
- COMPARE(b(INST_OFF(0x7fffffc)), "b #+0x7fffffc");
- COMPARE(b(INST_OFF(-0x8000000)), "b #-0x8000000");
- COMPARE(b(INST_OFF(0xffffc), eq), "b.eq #+0xffffc");
- COMPARE(b(INST_OFF(-0x100000), mi), "b.mi #-0x100000");
- COMPARE(bl(INST_OFF(0x4)), "bl #+0x4");
- COMPARE(bl(INST_OFF(-0x4)), "bl #-0x4");
- COMPARE(bl(INST_OFF(0xffffc)), "bl #+0xffffc");
- COMPARE(bl(INST_OFF(-0x100000)), "bl #-0x100000");
- COMPARE(cbz(w0, INST_OFF(0xffffc)), "cbz w0, #+0xffffc");
- COMPARE(cbz(x1, INST_OFF(-0x100000)), "cbz x1, #-0x100000");
- COMPARE(cbnz(w2, INST_OFF(0xffffc)), "cbnz w2, #+0xffffc");
- COMPARE(cbnz(x3, INST_OFF(-0x100000)), "cbnz x3, #-0x100000");
- COMPARE(tbz(w4, 0, INST_OFF(0x7ffc)), "tbz w4, #0, #+0x7ffc");
- COMPARE(tbz(x5, 63, INST_OFF(-0x8000)), "tbz x5, #63, #-0x8000");
- COMPARE(tbz(w6, 31, INST_OFF(0)), "tbz w6, #31, #+0x0");
- COMPARE(tbz(x7, 31, INST_OFF(0x4)), "tbz w7, #31, #+0x4");
- COMPARE(tbz(x8, 32, INST_OFF(0x8)), "tbz x8, #32, #+0x8");
- COMPARE(tbnz(w8, 0, INST_OFF(0x7ffc)), "tbnz w8, #0, #+0x7ffc");
- COMPARE(tbnz(x9, 63, INST_OFF(-0x8000)), "tbnz x9, #63, #-0x8000");
- COMPARE(tbnz(w10, 31, INST_OFF(0)), "tbnz w10, #31, #+0x0");
- COMPARE(tbnz(x11, 31, INST_OFF(0x4)), "tbnz w11, #31, #+0x4");
- COMPARE(tbnz(x12, 32, INST_OFF(0x8)), "tbnz x12, #32, #+0x8");
+ COMPARE_PREFIX(b(INST_OFF(0x4)), "b #+0x4");
+ COMPARE_PREFIX(b(INST_OFF(-0x4)), "b #-0x4");
+ COMPARE_PREFIX(b(INST_OFF(0x7fffffc)), "b #+0x7fffffc");
+ COMPARE_PREFIX(b(INST_OFF(-0x8000000)), "b #-0x8000000");
+ COMPARE_PREFIX(b(INST_OFF(0xffffc), eq), "b.eq #+0xffffc");
+ COMPARE_PREFIX(b(INST_OFF(-0x100000), mi), "b.mi #-0x100000");
+ COMPARE_PREFIX(bl(INST_OFF(0x4)), "bl #+0x4");
+ COMPARE_PREFIX(bl(INST_OFF(-0x4)), "bl #-0x4");
+ COMPARE_PREFIX(bl(INST_OFF(0xffffc)), "bl #+0xffffc");
+ COMPARE_PREFIX(bl(INST_OFF(-0x100000)), "bl #-0x100000");
+ COMPARE_PREFIX(cbz(w0, INST_OFF(0xffffc)), "cbz w0, #+0xffffc");
+ COMPARE_PREFIX(cbz(x1, INST_OFF(-0x100000)), "cbz x1, #-0x100000");
+ COMPARE_PREFIX(cbnz(w2, INST_OFF(0xffffc)), "cbnz w2, #+0xffffc");
+ COMPARE_PREFIX(cbnz(x3, INST_OFF(-0x100000)), "cbnz x3, #-0x100000");
+ COMPARE_PREFIX(tbz(w4, 0, INST_OFF(0x7ffc)), "tbz w4, #0, #+0x7ffc");
+ COMPARE_PREFIX(tbz(x5, 63, INST_OFF(-0x8000)), "tbz x5, #63, #-0x8000");
+ COMPARE_PREFIX(tbz(w6, 31, INST_OFF(0)), "tbz w6, #31, #+0x0");
+ COMPARE_PREFIX(tbz(x7, 31, INST_OFF(0x4)), "tbz w7, #31, #+0x4");
+ COMPARE_PREFIX(tbz(x8, 32, INST_OFF(0x8)), "tbz x8, #32, #+0x8");
+ COMPARE_PREFIX(tbnz(w8, 0, INST_OFF(0x7ffc)), "tbnz w8, #0, #+0x7ffc");
+ COMPARE_PREFIX(tbnz(x9, 63, INST_OFF(-0x8000)), "tbnz x9, #63, #-0x8000");
+ COMPARE_PREFIX(tbnz(w10, 31, INST_OFF(0)), "tbnz w10, #31, #+0x0");
+ COMPARE_PREFIX(tbnz(x11, 31, INST_OFF(0x4)), "tbnz w11, #31, #+0x4");
+ COMPARE_PREFIX(tbnz(x12, 32, INST_OFF(0x8)), "tbnz x12, #32, #+0x8");
COMPARE(br(x0), "br x0");
COMPARE(blr(x1), "blr x1");
COMPARE(ret(x2), "ret x2");