// Result returned in eax, or eax+edx if result_size_ is 2.
+ // Check stack alignment.
+ if (FLAG_debug_code) {
+ __ CheckStackAlignment();
+ }
+
if (do_gc) {
+ // Pass failure code returned from last attempt as first argument to
+ // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
+ // stack alignment is known to be correct. This function takes one argument
+ // which is passed on the stack, and we know that the stack has been
+ // prepared to pass at least one argument.
__ mov(Operand(esp, 0 * kPointerSize), eax); // Result.
__ call(FUNCTION_ADDR(Runtime::PerformGC), RelocInfo::RUNTIME_ENTRY);
}
}
+// Debug-mode helper: verify that esp currently satisfies the platform's C
+// ABI activation-frame alignment. If the stack pointer is misaligned, emit
+// an int3 breakpoint so execution stops at the fault. No code is emitted on
+// platforms whose required alignment is no stricter than the pointer size.
+void MacroAssembler::CheckStackAlignment() {
+  int frame_alignment = OS::ActivationFrameAlignment();
+  int frame_alignment_mask = frame_alignment - 1;
+  if (frame_alignment > kPointerSize) {
+    // The mask test below is only valid for power-of-two alignments.
+    ASSERT(IsPowerOf2(frame_alignment));
+    Label alignment_as_expected;
+    // Any low bit set in esp means the stack is not aligned.
+    test(esp, Immediate(frame_alignment_mask));
+    j(zero, &alignment_as_expected);
+    // Abort if stack is not aligned.
+    int3();
+    bind(&alignment_as_expected);
+  }
+}
+
+
void MacroAssembler::Abort(const char* msg) {
// We want to pass the msg string like a smi to avoid GC
// problems, however msg is not guaranteed to be aligned
void MacroAssembler::CallCFunction(Register function,
int num_arguments) {
+ // Check stack alignment.
+ if (FLAG_debug_code) {
+ CheckStackAlignment();
+ }
+
call(Operand(function));
if (OS::ActivationFrameAlignment() != 0) {
mov(esp, Operand(esp, num_arguments * kPointerSize));
// Print a message to stdout and abort execution.
void Abort(const char* msg);
+ // Check that the stack is aligned.
+ void CheckStackAlignment();
+
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
bool generating_stub() { return generating_stub_; }
#elif V8_TARGET_ARCH_MIPS
return 8;
#endif
- // With gcc 4.4 the tree vectorization optimiser can generate code
+ // With gcc 4.4 the tree vectorization optimizer can generate code
// that requires 16 byte alignment such as movdqa on x86.
return 16;
}
// Complex results must be written to address passed as first argument.
// AMD64 calling convention: a struct of two pointers in rax+rdx
+ // Check stack alignment.
+ if (FLAG_debug_code) {
+ __ CheckStackAlignment();
+ }
+
if (do_gc) {
- // Pass failure code returned from last attempt as first argument to GC.
+ // Pass failure code returned from last attempt as first argument to
+ // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
+ // stack is known to be aligned. This function takes one argument which is
+ // passed in register.
#ifdef _WIN64
__ movq(rcx, rax);
#else // ! defined(_WIN64)
}
+// Debug-mode helper: verify that rsp currently satisfies the platform's C
+// ABI activation-frame alignment. If the stack pointer is misaligned, emit
+// an int3 breakpoint so execution stops at the fault. No code is emitted on
+// platforms whose required alignment is no stricter than the pointer size.
+void MacroAssembler::CheckStackAlignment() {
+  int frame_alignment = OS::ActivationFrameAlignment();
+  int frame_alignment_mask = frame_alignment - 1;
+  if (frame_alignment > kPointerSize) {
+    // The mask test below is only valid for power-of-two alignments.
+    ASSERT(IsPowerOf2(frame_alignment));
+    Label alignment_as_expected;
+    // Any low bit set in rsp means the stack is not aligned.
+    testq(rsp, Immediate(frame_alignment_mask));
+    j(zero, &alignment_as_expected);
+    // Abort if stack is not aligned.
+    int3();
+    bind(&alignment_as_expected);
+  }
+}
+
+
void MacroAssembler::NegativeZeroTest(Register result,
Register op,
Label* then_label) {
void MacroAssembler::CallCFunction(Register function, int num_arguments) {
+ // Check stack alignment.
+ if (FLAG_debug_code) {
+ CheckStackAlignment();
+ }
+
call(function);
ASSERT(OS::ActivationFrameAlignment() != 0);
ASSERT(num_arguments >= 0);
// Print a message to stdout and abort execution.
void Abort(const char* msg);
+ // Check that the stack is aligned.
+ void CheckStackAlignment();
+
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
bool generating_stub() { return generating_stub_; }