void MacroAssembler::CallApiFunctionAndReturn(
- Address function_address,
+ Register function_address,
Address thunk_address,
Register thunk_last_arg,
int stack_space,
ExternalReference scheduled_exception_address =
ExternalReference::scheduled_exception_address(isolate());
+ ASSERT(rdx.is(function_address) || r8.is(function_address));
// Allocate HandleScope in callee-save registers.
Register prev_next_address_reg = r14;
Register prev_limit_reg = rbx;
j(zero, &profiler_disabled);
// Third parameter is the address of the actual getter function.
- Move(thunk_last_arg, function_address, RelocInfo::EXTERNAL_REFERENCE);
+ Move(thunk_last_arg, function_address);
Move(rax, thunk_address, RelocInfo::EXTERNAL_REFERENCE);
jmp(&end_profiler_check);
bind(&profiler_disabled);
// Call the api function!
- Move(rax, reinterpret_cast<Address>(function_address),
- RelocInfo::EXTERNAL_REFERENCE);
+ Move(rax, function_address);
bind(&end_profiler_check);
}
// R12 to r15 are callee save on all platforms.
if (fp_mode == kSaveFPRegs) {
- subq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
+ subq(rsp, Immediate(kSIMD128Size * XMMRegister::kMaxNumRegisters));
for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
- movsd(Operand(rsp, i * kDoubleSize), reg);
+ movups(Operand(rsp, i * kSIMD128Size), reg);
}
}
}
if (fp_mode == kSaveFPRegs) {
for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
- movsd(reg, Operand(rsp, i * kDoubleSize));
+ movups(reg, Operand(rsp, i * kSIMD128Size));
}
- addq(rsp, Immediate(kDoubleSize * XMMRegister::kMaxNumRegisters));
+ addq(rsp, Immediate(kSIMD128Size * XMMRegister::kMaxNumRegisters));
}
for (int i = kNumberOfSavedRegs - 1; i >= 0; i--) {
Register reg = saved_regs[i];
}
+// Computes the absolute value of each of the four single-precision lanes in
+// |dst| by clearing the per-lane sign bit (bitwise AND with 0x7FFFFFFF).
+// Clobbers kScratchRegister, which holds the address of the mask constant.
+void MacroAssembler::absps(XMMRegister dst) {
+ // V8_ALIGNED(16): andps with a memory operand requires a 16-byte-aligned
+ // address.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_absolute_constant =
+ { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF };
+ Set(kScratchRegister, reinterpret_cast<intptr_t>(&float_absolute_constant));
+ andps(dst, Operand(kScratchRegister, 0));
+}
+
+
+// Negates each of the four single-precision lanes in |dst| by toggling the
+// per-lane sign bit (bitwise XOR with 0x80000000).
+// Clobbers kScratchRegister, which holds the address of the mask constant.
+void MacroAssembler::negateps(XMMRegister dst) {
+ // V8_ALIGNED(16): xorps with a memory operand requires a 16-byte-aligned
+ // address.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_negate_constant =
+ { 0x80000000, 0x80000000, 0x80000000, 0x80000000 };
+ Set(kScratchRegister, reinterpret_cast<intptr_t>(&float_negate_constant));
+ xorps(dst, Operand(kScratchRegister, 0));
+}
+
+
+// Computes the bitwise complement of all 128 bits of |dst| (XOR with an
+// all-ones constant). Despite the "ps" name this is a pure bit operation,
+// usable for integer lanes as well (see pnegd below).
+// Clobbers kScratchRegister, which holds the address of the mask constant.
+void MacroAssembler::notps(XMMRegister dst) {
+ // V8_ALIGNED(16): xorps with a memory operand requires a 16-byte-aligned
+ // address.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_not_constant =
+ { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
+ Set(kScratchRegister, reinterpret_cast<intptr_t>(&float_not_constant));
+ xorps(dst, Operand(kScratchRegister, 0));
+}
+
+
+// Two's-complement negates each 32-bit integer lane of |dst|: -x == ~x + 1,
+// implemented as notps (bitwise complement) followed by paddd with a vector
+// of 1s. Clobbers kScratchRegister (used by notps and again here to address
+// the constant).
+void MacroAssembler::pnegd(XMMRegister dst) {
+ // V8_ALIGNED(16): paddd with a memory operand requires a 16-byte-aligned
+ // address.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } int32_one_constant = { 0x1, 0x1, 0x1, 0x1 };
+ notps(dst);
+ Set(kScratchRegister, reinterpret_cast<intptr_t>(&int32_one_constant));
+ paddd(dst, Operand(kScratchRegister, 0));
+}
+
+
+
void MacroAssembler::JumpIfNotString(Register object,
Register object_map,
Label* not_string,
#endif
// Optionally save all XMM registers.
if (save_doubles) {
- int space = XMMRegister::kMaxNumAllocatableRegisters * kDoubleSize +
+ int space = XMMRegister::kMaxNumAllocatableRegisters * kSIMD128Size +
arg_stack_space * kRegisterSize;
subq(rsp, Immediate(space));
int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
XMMRegister reg = XMMRegister::FromAllocationIndex(i);
- movsd(Operand(rbp, offset - ((i + 1) * kDoubleSize)), reg);
+ movups(Operand(rbp, offset - ((i + 1) * kSIMD128Size)), reg);
}
} else if (arg_stack_space > 0) {
subq(rsp, Immediate(arg_stack_space * kRegisterSize));
int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::NumAllocatableRegisters(); i++) {
XMMRegister reg = XMMRegister::FromAllocationIndex(i);
- movsd(reg, Operand(rbp, offset - ((i + 1) * kDoubleSize)));
+ movups(reg, Operand(rbp, offset - ((i + 1) * kSIMD128Size)));
}
}
// Get the return address from the stack and restore the frame pointer.
}
+// Allocates a tagged heap object of |size| bytes and installs the map found
+// at |map_index| in the root list. On allocation failure control jumps to
+// |gc_required|. The tagged pointer is left in |result|; |scratch| and
+// kScratchRegister are clobbered. NOTE(review): only the map field is
+// initialized here — presumably the caller fills in the remaining fields;
+// confirm at call sites.
+void MacroAssembler::AllocateSIMDHeapObject(int size,
+ Register result,
+ Register scratch,
+ Label* gc_required,
+ Heap::RootListIndex map_index) {
+ Allocate(size, result, scratch, no_reg, gc_required, TAG_OBJECT);
+
+ // Set the map.
+ LoadRoot(kScratchRegister, map_index);
+ movp(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
+}
+
+
void MacroAssembler::AllocateTwoByteString(Register result,
Register length,
Register scratch1,
uint32_t encoding_mask) {
Label is_object;
JumpIfNotSmi(string, &is_object);
- Throw(kNonObject);
+ Abort(kNonObject);
bind(&is_object);
push(value);
andb(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
cmpq(value, Immediate(encoding_mask));
pop(value);
- ThrowIf(not_equal, kUnexpectedStringType);
+ Check(equal, kUnexpectedStringType);
// The index is assumed to be untagged coming in, tag it to compare with the
// string length without using a temp register, it is restored at the end of
// this function.
Integer32ToSmi(index, index);
SmiCompare(index, FieldOperand(string, String::kLengthOffset));
- ThrowIf(greater_equal, kIndexIsTooLarge);
+ Check(less, kIndexIsTooLarge);
SmiCompare(index, Smi::FromInt(0));
- ThrowIf(less, kIndexIsNegative);
+ Check(greater_equal, kIndexIsNegative);
// Restore the index
SmiToInteger32(index, index);