// Optionally save all XMM registers.
if (save_doubles) {
CpuFeatureScope scope(this, SSE2);
- int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
+ int space = XMMRegister::kNumRegisters * kSIMD128Size +
+ argc * kPointerSize;
sub(esp, Immediate(space));
const int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
- movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
+ movups(Operand(ebp, offset - ((i + 1) * kSIMD128Size)), reg);
}
} else {
sub(esp, Immediate(argc * kPointerSize));
const int offset = -2 * kPointerSize;
for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
XMMRegister reg = XMMRegister::from_code(i);
- movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
+ movups(reg, Operand(ebp, offset - ((i + 1) * kSIMD128Size)));
}
}
}
+// Allocates |size| bytes in new space for a SIMD heap object and installs
+// the map chosen by |map_index|. On success |result| holds the tagged
+// pointer (TAG_OBJECT); |scratch| is clobbered by Allocate. Jumps to
+// |gc_required| if the allocation fails. Only the Float32x4 and Int32x4
+// maps are supported; any other root index hits UNREACHABLE().
+void MacroAssembler::AllocateSIMDHeapObject(int size,
+ Register result,
+ Register scratch,
+ Label* gc_required,
+ Heap::RootListIndex map_index) {
+ Allocate(size, result, scratch, no_reg, gc_required, TAG_OBJECT);
+
+ // Set the map.
+ switch (map_index) {
+ case Heap::kFloat32x4MapRootIndex:
+ mov(FieldOperand(result, HeapObject::kMapOffset),
+ Immediate(isolate()->factory()->float32x4_map()));
+ break;
+ case Heap::kInt32x4MapRootIndex:
+ mov(FieldOperand(result, HeapObject::kMapOffset),
+ Immediate(isolate()->factory()->int32x4_map()));
+ break;
+ default:
+ // Callers must pass one of the two SIMD map indices handled above.
+ UNREACHABLE();
+ }
+}
+
+
void MacroAssembler::AllocateTwoByteString(Register result,
Register length,
Register scratch1,
void MacroAssembler::CallApiFunctionAndReturn(
- Address function_address,
+ Register function_address,
Address thunk_address,
Operand thunk_last_arg,
int stack_space,
ExternalReference level_address =
ExternalReference::handle_scope_level_address(isolate());
+ ASSERT(edx.is(function_address));
// Allocate HandleScope in callee-save registers.
mov(ebx, Operand::StaticVariable(next_address));
mov(edi, Operand::StaticVariable(limit_address));
j(zero, &profiler_disabled);
// Additional parameter is the address of the actual getter function.
- mov(thunk_last_arg, Immediate(function_address));
+ mov(thunk_last_arg, function_address);
// Call the api function.
call(thunk_address, RelocInfo::RUNTIME_ENTRY);
jmp(&end_profiler_check);
bind(&profiler_disabled);
// Call the api function.
- call(function_address, RelocInfo::RUNTIME_ENTRY);
+ call(function_address);
bind(&end_profiler_check);
if (FLAG_log_timer_events) {
uint32_t encoding_mask) {
Label is_object;
JumpIfNotSmi(string, &is_object, Label::kNear);
- Throw(kNonObject);
+ Abort(kNonObject);
bind(&is_object);
push(value);
and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
cmp(value, Immediate(encoding_mask));
pop(value);
- ThrowIf(not_equal, kUnexpectedStringType);
+ Check(equal, kUnexpectedStringType);
// The index is assumed to be untagged coming in, tag it to compare with the
// string length without using a temp register, it is restored at the end of
// this function.
SmiTag(index);
- // Can't use overflow here directly, compiler can't seem to disambiguate.
- ThrowIf(NegateCondition(no_overflow), kIndexIsTooLarge);
+ Check(no_overflow, kIndexIsTooLarge);
cmp(index, FieldOperand(string, String::kLengthOffset));
- ThrowIf(greater_equal, kIndexIsTooLarge);
+ Check(less, kIndexIsTooLarge);
cmp(index, Immediate(Smi::FromInt(0)));
- ThrowIf(less, kIndexIsNegative);
+ Check(greater_equal, kIndexIsNegative);
// Restore the index
SmiUntag(index);
j(not_equal, &loop_again);
}
+
+// dst = |dst| lanewise: clears the sign bit of each of the four packed
+// single-precision floats by AND-ing with 0x7FFFFFFF per lane.
+void MacroAssembler::absps(XMMRegister dst) {
+ // 16-byte aligned constant: SSE andps requires its 128-bit memory
+ // operand to be 16-byte aligned.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_absolute_constant =
+ { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF };
+ // ia32 only: embed the 32-bit address of the static mask directly.
+ andps(dst,
+ Operand(reinterpret_cast<int32_t>(&float_absolute_constant),
+ RelocInfo::NONE32));
+}
+
+
+// dst = ~dst: bitwise NOT of the full 128-bit register, implemented as
+// XOR with an all-ones mask.
+void MacroAssembler::notps(XMMRegister dst) {
+ // 16-byte aligned constant: SSE xorps requires its 128-bit memory
+ // operand to be 16-byte aligned.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_not_constant =
+ { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
+ // ia32 only: embed the 32-bit address of the static mask directly.
+ xorps(dst,
+ Operand(reinterpret_cast<int32_t>(&float_not_constant),
+ RelocInfo::NONE32));
+}
+
+
+// dst = -dst lanewise: flips the sign bit of each of the four packed
+// single-precision floats by XOR-ing with 0x80000000 per lane.
+void MacroAssembler::negateps(XMMRegister dst) {
+ // 16-byte aligned constant: SSE xorps requires its 128-bit memory
+ // operand to be 16-byte aligned.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } float_negate_constant =
+ { 0x80000000, 0x80000000, 0x80000000, 0x80000000 };
+ // ia32 only: embed the 32-bit address of the static mask directly.
+ xorps(dst,
+ Operand(reinterpret_cast<int32_t>(&float_negate_constant),
+ RelocInfo::NONE32));
+}
+
+
+// dst = -dst lanewise for four packed 32-bit integers, computed as the
+// two's complement ~dst + 1 per lane (notps followed by paddd with 1s).
+void MacroAssembler::pnegd(XMMRegister dst) {
+ // 16-byte aligned constant of four lane-wise 1s for the +1 step; SSE
+ // paddd requires its 128-bit memory operand to be 16-byte aligned.
+ static const struct V8_ALIGNED(16) {
+ uint32_t a;
+ uint32_t b;
+ uint32_t c;
+ uint32_t d;
+ } int32_one_constant = { 0x1, 0x1, 0x1, 0x1 };
+ // NOTE(review): notps uses float-domain xorps on integer data; result is
+ // bit-identical, though some microarchitectures charge a domain-crossing
+ // penalty before the integer paddd — confirm this is acceptable here.
+ notps(dst);
+ paddd(dst,
+ Operand(reinterpret_cast<int32_t>(&int32_one_constant),
+ RelocInfo::NONE32));
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_IA32