*pc_++ = (x)
+#ifdef GENERATED_CODE_COVERAGE
+static void InitCoverageLog();
+#endif
+
// spare_buffer_
static byte* spare_buffer_ = NULL;
current_position_ = RelocInfo::kNoPosition;
written_statement_position_ = current_statement_position_;
written_position_ = current_position_;
+#ifdef GENERATED_CODE_COVERAGE
+ InitCoverageLog();
+#endif
}
long_at_put(position, label_loc);
}
+
+#ifdef GENERATED_CODE_COVERAGE
+static FILE* coverage_log = NULL;
+
+
+static void InitCoverageLog() {
+  // Open the generated-code coverage log named by the environment variable,
+  // in append mode so repeated runs accumulate into one log.  Note: the
+  // previous mode string "aw+" is not a standard fopen mode ('w' after 'a'
+  // is implementation-defined); plain "a" expresses the append intent
+  // portably, and nothing here ever reads from the stream.
+  char* file_name = getenv("V8_GENERATED_CODE_COVERAGE_LOG");
+  if (file_name != NULL) {
+    coverage_log = fopen(file_name, "a");
+  }
+}
+
+
+void LogGeneratedCodeCoverage(const char* file_line) {  // file_line is the "__FILE__:__LINE__" string pushed by the ia32 ACCESS_MASM coverage prologue
+  const char* return_address = (&file_line)[-1];  // stack slot just below the first argument -- the caller's return address under ia32 cdecl; TODO(review): confirm for the target compiler/ABI
+  char* push_insn = const_cast<char*>(return_address - 12);  // start of the 12-byte pushfd/pushad/push imm32/call rel32 sequence ACCESS_MASM emitted at this call site
+  push_insn[0] = 0xeb; // Relative branch insn. Rewrites the call site so this coverage stub is skipped on later executions (each site logs at most once).
+  push_insn[1] = 13; // Skip over coverage insns. (remaining 10 prologue bytes + pop eax + popad + popfd)
+  if (coverage_log != NULL) {  // only if InitCoverageLog successfully opened the log file
+    fprintf(coverage_log, "%s\n", file_line);
+    fflush(coverage_log);  // flush immediately so a crash in generated code does not lose entries
+  }
+}
+
+#endif
+
} } // namespace v8::internal
namespace v8 { namespace internal {
-#define __ DEFINE_MASM(masm)
+#define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
namespace v8 { namespace internal {
-#define __ masm->
+#define __ ACCESS_MASM(masm)
void Builtins::Generate_Adaptor(MacroAssembler* masm, CFunctionId id) {
namespace v8 { namespace internal {
-#define __ DEFINE_MASM(masm_)
+#define __ ACCESS_MASM(masm_)
// -------------------------------------------------------------------------
#undef __
-#define __ DEFINE_MASM(masm)
+#define __ ACCESS_MASM(masm)
Handle<String> Reference::GetName() {
namespace v8 { namespace internal {
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
// -------------------------------------------------------------------------
// CodeGenState implementation.
// Add a label for checking the size of the code used for returning.
Label check_exit_codesize;
- __ bind(&check_exit_codesize);
+ masm_->bind(&check_exit_codesize);
// Leave the frame and return popping the arguments and the
// receiver.
frame_->Exit();
- __ ret((scope_->num_parameters() + 1) * kPointerSize);
+ masm_->ret((scope_->num_parameters() + 1) * kPointerSize);
DeleteFrame();
// Check that the size of the code used for returning matches what is
// expected by the debugger.
ASSERT_EQ(Debug::kIa32JSReturnSequenceLength,
- __ SizeOfCodeGeneratedSince(&check_exit_codesize));
+ masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
}
times_1, 0x0, RelocInfo::INTERNAL_REFERENCE));
smi_value.Unuse();
// Calculate address to overwrite later with actual address of table.
- int32_t jump_table_ref = __ pc_offset() - sizeof(int32_t);
+ int32_t jump_table_ref = masm_->pc_offset() - sizeof(int32_t);
__ Align(4);
Label table_start;
__ bind(&table_start);
// Loop up the context chain. There is no frame effect so it is
// safe to use raw labels here.
Label next, fast;
- if (!context.reg().is(tmp.reg())) __ mov(tmp.reg(), context.reg());
+ if (!context.reg().is(tmp.reg())) {
+ __ mov(tmp.reg(), context.reg());
+ }
__ bind(&next);
// Terminate at global context.
__ cmp(FieldOperand(tmp.reg(), HeapObject::kMapOffset),
// instruction.
ASSERT(value.is_register() && value.reg().is(eax));
// The delta from the start of the map-compare instruction to the
- // test eax instruction.
- int delta_to_patch_site = __ SizeOfCodeGeneratedSince(patch_site());
+ // test eax instruction. We use masm_ directly here instead of the
+ // __ macro because the __ macro sometimes uses macro expansion to turn
+ // into something that can't return a value. This is encountered when
+ // doing generated code coverage tests.
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
__ test(value.reg(), Immediate(-delta_to_patch_site));
__ IncrementCounter(&Counters::keyed_load_inline_miss, 1);
#undef __
-#define __ masm->
+#define __ ACCESS_MASM(masm)
Handle<String> Reference::GetName() {
ASSERT(type_ == NAMED);
#undef __
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
Result DeferredInlineBinaryOperation::GenerateInlineCode(Result* left,
Result* right) {
#undef __
-#define __ masm->
+#define __ ACCESS_MASM(masm)
void GenericBinaryOpStub::GenerateSmiCode(MacroAssembler* masm, Label* slow) {
// Perform fast-case smi code for the operation (eax <op> ebx) and
}
// SHR should return uint32 - go to runtime for non-smi/negative result.
- if (op_ == Token::SHR) __ bind(&non_smi_result);
+ if (op_ == Token::SHR) {
+ __ bind(&non_smi_result);
+ }
__ mov(eax, Operand(esp, 1 * kPointerSize));
__ mov(edx, Operand(esp, 2 * kPointerSize));
break;
}
-#define __ DEFINE_MASM(masm)
+#define __ ACCESS_MASM(masm)
static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
}
-#define __ masm->
+#define __ ACCESS_MASM(masm)
static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
}
-#ifdef ARM_GENERATED_CODE_COVERAGE
-#define CODE_COVERAGE_STRINGIFY(x) #x
-#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
-#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
-#define DEFINE_MASM(masm) masm->stop(__FILE_LINE__); masm->
-#else
-#define DEFINE_MASM(masm) masm->
-#endif
-
-
} } // namespace v8::internal
#endif // V8_GLOBALS_H_
// Static IC stub generators.
//
-#define __ DEFINE_MASM(masm)
+#define __ ACCESS_MASM(masm)
// Helper function used from LoadIC/CallIC GenerateNormal.
// Static IC stub generators.
//
-#define __ masm->
+#define __ ACCESS_MASM(masm)
// Helper function used to load a property from a dictionary backing storage.
// Compute the masked index: (hash + i + i * i) & mask.
__ mov(r1, FieldOperand(name, String::kLengthOffset));
__ shr(r1, String::kHashShift);
- if (i > 0) __ add(Operand(r1), Immediate(Dictionary::GetProbeOffset(i)));
+ if (i > 0) {
+ __ add(Operand(r1), Immediate(Dictionary::GetProbeOffset(i)));
+ }
__ and_(r1, Operand(r2));
// Scale the index by multiplying by the element size.
// -------------------------------------------------------------------------
// JumpTarget implementation.
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
void JumpTarget::DoJump() {
ASSERT(cgen_ != NULL);
// -------------------------------------------------------------------------
// JumpTarget implementation.
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
void JumpTarget::DoJump() {
ASSERT(cgen_ != NULL);
}
+#ifdef GENERATED_CODE_COVERAGE
+#define CODE_COVERAGE_STRINGIFY(x) #x
+#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
+#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
+#define ACCESS_MASM(masm) masm->stop(__FILE_LINE__); masm->
+#else
+#define ACCESS_MASM(masm) masm->
+#endif
+
} } // namespace v8::internal
return Operand(object, index, scale, offset - kHeapObjectTag);
}
+
+#ifdef GENERATED_CODE_COVERAGE
+extern void LogGeneratedCodeCoverage(const char* file_line);
+#define CODE_COVERAGE_STRINGIFY(x) #x
+#define CODE_COVERAGE_TOSTRING(x) CODE_COVERAGE_STRINGIFY(x)
+#define __FILE_LINE__ __FILE__ ":" CODE_COVERAGE_TOSTRING(__LINE__)
+#define ACCESS_MASM(masm) { \
+ byte* ia32_coverage_function = \
+ reinterpret_cast<byte*>(FUNCTION_ADDR(LogGeneratedCodeCoverage)); \
+ masm->pushfd(); \
+ masm->pushad(); \
+ masm->push(Immediate(reinterpret_cast<int>(&__FILE_LINE__))); \
+ masm->call(ia32_coverage_function, RelocInfo::RUNTIME_ENTRY); \
+ masm->pop(eax); \
+ masm->popad(); \
+ masm->popfd(); \
+ } \
+ masm->
+#else
+#define ACCESS_MASM(masm) masm->
+#endif
+
+
} } // namespace v8::internal
#endif // V8_MACRO_ASSEMBLER_IA32_H_
* byte* stack_area_top)
*/
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
RegExpMacroAssemblerIA32::RegExpMacroAssemblerIA32(
Mode mode,
-#ifdef ARM_GENERATED_CODE_COVERAGE
+#ifdef GENERATED_CODE_COVERAGE
static FILE* coverage_log = NULL;
char* str = reinterpret_cast<char*>(instr->InstructionBits() & 0x0fffffff);
if (strlen(str) > 0) {
if (coverage_log != NULL) {
- fprintf(coverage_log, "Simulator hit %s\n", str);
+ fprintf(coverage_log, "%s\n", str);
fflush(coverage_log);
}
instr->SetInstructionBits(0xe1a00000); // Overwrite with nop.
sim_->set_pc(sim_->get_pc() + Instr::kInstrSize);
}
-#else // ndef ARM_GENERATED_CODE_COVERAGE
+#else // ndef GENERATED_CODE_COVERAGE
static void InitializeCoverage() {
}
namespace v8 { namespace internal {
-#define __ DEFINE_MASM(masm)
+#define __ ACCESS_MASM(masm)
static void ProbeTable(MacroAssembler* masm,
#undef __
-#define __ DEFINE_MASM(masm())
+#define __ ACCESS_MASM(masm())
Object* StubCompiler::CompileLazyCompile(Code::Flags flags) {
namespace v8 { namespace internal {
-#define __ masm->
+#define __ ACCESS_MASM(masm)
static void ProbeTable(MacroAssembler* masm,
// Check that the maps haven't changed.
Register reg =
- __ CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
+ masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
// Get the value from the properties.
GenerateFastPropertyLoad(masm, eax, reg, holder, index);
// Check that the maps haven't changed.
Register reg =
- __ CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
+ masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
// Push the arguments on the JS stack of the caller.
__ pop(scratch2); // remove return address
// Check that the maps haven't changed.
Register reg =
- __ CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
+ masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
// Return the constant value.
__ mov(eax, Handle<Object>(value));
// Check that the maps haven't changed.
Register reg =
- __ CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
+ masm->CheckMaps(object, receiver, holder, scratch1, scratch2, miss_label);
// Push the arguments on the JS stack of the caller.
__ pop(scratch2); // remove return address
#undef __
-#define __ masm()->
+#define __ ACCESS_MASM(masm())
// TODO(1241006): Avoid having lazy compile stubs specialized by the
// Do the right check and compute the holder register.
Register reg =
- __ CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss);
+ masm()->CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss);
GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
// Check that maps have not changed and compute the holder register.
Register reg =
- __ CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss);
+ masm()->CheckMaps(JSObject::cast(object), edx, holder, ebx, ecx, &miss);
// Enter an internal frame.
__ EnterInternalFrame();
// -------------------------------------------------------------------------
// VirtualFrame implementation.
-#define __ DEFINE_MASM(masm_)
+#define __ ACCESS_MASM(masm_)
// On entry to a function, the virtual frame already contains the
namespace v8 { namespace internal {
-#define __ masm_->
+#define __ ACCESS_MASM(masm_)
// -------------------------------------------------------------------------
// VirtualFrame implementation.