// construct call and normal call.
static void ArrayNativeCode(MacroAssembler* masm,
Label* call_generic_code) {
+ Counters* counters = masm->isolate()->counters();
Label argc_one_or_more, argc_two_or_more;
// Check for array construction with zero arguments or one.
r5,
JSArray::kPreallocatedArrayElements,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1, r3, r4);
+ __ IncrementCounter(counters->array_function_native(), 1, r3, r4);
// Setup return value, remove receiver from stack and return.
__ mov(r0, r2);
__ add(sp, sp, Operand(kPointerSize));
r7,
true,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1, r2, r4);
+ __ IncrementCounter(counters->array_function_native(), 1, r2, r4);
// Setup return value, remove receiver and argument from stack and return.
__ mov(r0, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
r7,
false,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1, r2, r6);
+ __ IncrementCounter(counters->array_function_native(), 1, r2, r6);
// Fill arguments as array elements. Copy from the top of the stack (last
// element) to the array backing store filling it backwards. Note:
// -- sp[(argc - n - 1) * 4] : arg[n] (zero based)
// -- sp[argc * 4] : receiver
// -----------------------------------
- __ IncrementCounter(COUNTERS->string_ctor_calls(), 1, r2, r3);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_ctor_calls(), 1, r2, r3);
Register function = r1;
if (FLAG_debug_code) {
r5, // Scratch.
false, // Is it a Smi?
¬_cached);
- __ IncrementCounter(COUNTERS->string_ctor_cached_number(), 1, r3, r4);
+ __ IncrementCounter(counters->string_ctor_cached_number(), 1, r3, r4);
__ bind(&argument_is_string);
// ----------- S t a t e -------------
__ tst(r3, Operand(kIsNotStringMask));
__ b(ne, &convert_argument);
__ mov(argument, r0);
- __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1, r3, r4);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
__ b(&argument_is_string);
// Invoke the conversion builtin and put the result into r2.
__ bind(&convert_argument);
__ push(function); // Preserve the function.
- __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1, r3, r4);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1, r3, r4);
__ EnterInternalFrame();
__ push(r0);
__ InvokeBuiltin(Builtins::TO_STRING, CALL_JS);
// At this point the argument is already a string. Call runtime to
// create a string wrapper.
__ bind(&gc_required);
- __ IncrementCounter(COUNTERS->string_ctor_gc_required(), 1, r3, r4);
+ __ IncrementCounter(counters->string_ctor_gc_required(), 1, r3, r4);
__ EnterInternalFrame();
__ push(argument);
__ CallRuntime(Runtime::kNewStringWrapper, 1);
// Should never count constructions for api objects.
ASSERT(!is_api_function || !count_constructions);
+ Isolate* isolate = masm->isolate();
+
// Enter a construct frame.
__ EnterConstructFrame();
Label undo_allocation;
#ifdef ENABLE_DEBUGGER_SUPPORT
ExternalReference debug_step_in_fp =
- ExternalReference::debug_step_in_fp_address(masm->isolate());
+ ExternalReference::debug_step_in_fp_address(isolate);
__ mov(r2, Operand(debug_step_in_fp));
__ ldr(r2, MemOperand(r2));
__ tst(r2, r2);
if (is_api_function) {
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
Handle<Code> code = Handle<Code>(
- masm->isolate()->builtins()->builtin(
- Builtins::HandleApiCallConstruct));
+ isolate->builtins()->builtin(Builtins::HandleApiCallConstruct));
ParameterCount expected(0);
__ InvokeCode(code, expected, expected,
RelocInfo::CODE_TARGET, CALL_FUNCTION);
__ LeaveConstructFrame();
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2 - 1));
__ add(sp, sp, Operand(kPointerSize));
- __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2);
+ __ IncrementCounter(isolate->counters()->constructed_objects(), 1, r1, r2);
__ Jump(lr);
}
// number string cache for smis is just the smi value, and the hash for
// doubles is the xor of the upper and lower words. See
// Heap::GetNumberStringCache.
+ Isolate* isolate = masm->isolate();
Label is_smi;
Label load_result_from_cache;
if (!object_is_smi) {
__ JumpIfSmi(object, &is_smi);
- if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+ if (isolate->cpu_features()->IsSupported(VFP3)) {
CpuFeatures::Scope scope(VFP3);
__ CheckMap(object,
scratch1,
__ bind(&load_result_from_cache);
__ ldr(result,
FieldMemOperand(scratch, FixedArray::kHeaderSize + kPointerSize));
- __ IncrementCounter(COUNTERS->number_to_string_native(),
+ __ IncrementCounter(isolate->counters()->number_to_string_native(),
1,
scratch1,
scratch2);
__ bind(&both_loaded_as_doubles);
// The arguments have been converted to doubles and stored in d6 and d7, if
// VFP3 is supported, or in r0, r1, r2, and r3.
- if (Isolate::Current()->cpu_features()->IsSupported(VFP3)) {
+ Isolate* isolate = masm->isolate();
+ if (isolate->cpu_features()->IsSupported(VFP3)) {
__ bind(&lhs_not_nan);
CpuFeatures::Scope scope(VFP3);
Label no_nan;
__ JumpIfNonSmisNotBothSequentialAsciiStrings(lhs_, rhs_, r2, r3, &slow);
- __ IncrementCounter(COUNTERS->string_compare_native(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->string_compare_native(), 1, r2, r3);
StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
lhs_,
rhs_,
// subject: Subject string
// regexp_data: RegExp data (FixedArray)
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(COUNTERS->regexp_entry_native(), 1, r0, r2);
+ __ IncrementCounter(isolate->counters()->regexp_entry_native(), 1, r0, r2);
// Isolates: note we add an additional parameter here (isolate pointer).
static const int kRegExpExecuteArguments = 8;
static const int kFromOffset = 1 * kPointerSize;
static const int kStringOffset = 2 * kPointerSize;
-
// Check bounds and smi-ness.
Register to = r6;
Register from = r7;
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, r3, r4, r1, r5, r6, r7, r9, &make_two_character_string);
- __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
__ bind(&make_two_character_string);
__ AllocateAsciiString(r0, r2, r4, r5, r9, &runtime);
__ strh(r3, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
- __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
+ __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
STATIC_ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(masm, r1, r5, r2, r3, r4, r6, r7, r9,
COPY_ASCII | DEST_ALWAYS_ALIGNED);
- __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
+ __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
STATIC_ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
StringHelper::GenerateCopyCharactersLong(
masm, r1, r5, r2, r3, r4, r6, r7, r9, DEST_ALWAYS_ALIGNED);
- __ IncrementCounter(COUNTERS->sub_string_native(), 1, r3, r4);
+ __ IncrementCounter(counters->sub_string_native(), 1, r3, r4);
__ add(sp, sp, Operand(3 * kPointerSize));
__ Ret();
void StringCompareStub::Generate(MacroAssembler* masm) {
Label runtime;
+ Counters* counters = masm->isolate()->counters();
+
// Stack frame on entry.
// sp[0]: right string
// sp[4]: left string
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
__ mov(r0, Operand(Smi::FromInt(EQUAL)));
- __ IncrementCounter(COUNTERS->string_compare_native(), 1, r1, r2);
+ __ IncrementCounter(counters->string_compare_native(), 1, r1, r2);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
__ JumpIfNotBothSequentialAsciiStrings(r1, r0, r2, r3, &runtime);
// Compare flat ASCII strings natively. Remove arguments from stack first.
- __ IncrementCounter(COUNTERS->string_compare_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_compare_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
GenerateCompareFlatAsciiStrings(masm, r1, r0, r2, r3, r4, r5);
Label string_add_runtime, call_builtin;
Builtins::JavaScript builtin_id = Builtins::ADD;
+ Counters* counters = masm->isolate()->counters();
+
// Stack on entry:
// sp[0]: second argument (right).
// sp[4]: first argument (left).
__ cmp(r3, Operand(Smi::FromInt(0)), ne);
__ b(ne, &strings_not_empty); // If either string was empty, return r0.
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
Label make_two_character_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, r2, r3, r6, r7, r4, r5, r9, &make_two_character_string);
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
__ mov(r6, Operand(2));
__ AllocateAsciiString(r0, r6, r4, r5, r9, &string_add_runtime);
__ strh(r2, FieldMemOperand(r0, SeqAsciiString::kHeaderSize));
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
__ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
__ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
__ mov(r0, Operand(r7));
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
// r7: result string.
StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, true);
__ mov(r0, Operand(r7));
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
StringHelper::GenerateCopyCharacters(masm, r6, r1, r3, r4, false);
__ mov(r0, Operand(r7));
- __ IncrementCounter(COUNTERS->string_add_native(), 1, r2, r3);
+ __ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
__ Ret();
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
frame_->CallStub(&stub, 3);
- __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1, r1, r2);
+ __ IncrementCounter(masm_->isolate()->counters()->cow_arrays_created_stub(),
+ 1, r1, r2);
} else if (node->depth() > 1) {
frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
Register scratch1 = VirtualFrame::scratch0();
Register scratch2 = VirtualFrame::scratch1();
ASSERT(!receiver_.is(scratch1) && !receiver_.is(scratch2));
- __ DecrementCounter(COUNTERS->named_load_inline(), 1, scratch1, scratch2);
- __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1,
- scratch1, scratch2);
+ __ DecrementCounter(masm_->isolate()->counters()->named_load_inline(),
+ 1, scratch1, scratch2);
+ __ IncrementCounter(masm_->isolate()->counters()->named_load_inline_miss(),
+ 1, scratch1, scratch2);
// Ensure receiver in r0 and name in r2 to match load ic calling convention.
__ Move(r0, receiver_);
Register scratch1 = VirtualFrame::scratch0();
Register scratch2 = VirtualFrame::scratch1();
- __ DecrementCounter(COUNTERS->keyed_load_inline(), 1, scratch1, scratch2);
- __ IncrementCounter(COUNTERS->keyed_load_inline_miss(),
- 1, scratch1, scratch2);
+ __ DecrementCounter(masm_->isolate()->counters()->keyed_load_inline(),
+ 1, scratch1, scratch2);
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline_miss(),
+ 1, scratch1, scratch2);
// Ensure key in r0 and receiver in r1 to match keyed load ic calling
// convention.
void DeferredReferenceSetKeyedValue::Generate() {
Register scratch1 = VirtualFrame::scratch0();
Register scratch2 = VirtualFrame::scratch1();
- __ DecrementCounter(COUNTERS->keyed_store_inline(), 1, scratch1, scratch2);
- __ IncrementCounter(COUNTERS->keyed_store_inline_miss(),
+ __ DecrementCounter(masm_->isolate()->counters()->keyed_store_inline(),
+ 1, scratch1, scratch2);
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline_miss(),
1, scratch1, scratch2);
// Ensure value in r0, key in r1 and receiver in r2 to match keyed store ic
// Counter will be decremented in the deferred code. Placed here to avoid
// having it in the instruction stream below where patching will occur.
if (is_contextual) {
- __ IncrementCounter(COUNTERS->named_load_global_inline(), 1,
- frame_->scratch0(), frame_->scratch1());
+ __ IncrementCounter(
+ masm_->isolate()->counters()->named_load_global_inline(),
+ 1, frame_->scratch0(), frame_->scratch1());
} else {
- __ IncrementCounter(COUNTERS->named_load_inline(), 1,
- frame_->scratch0(), frame_->scratch1());
+ __ IncrementCounter(masm_->isolate()->counters()->named_load_inline(),
+ 1, frame_->scratch0(), frame_->scratch1());
}
// The following instructions are the inlined load of an in-object property.
}
}
if (is_dont_delete) {
- __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1,
- frame_->scratch0(), frame_->scratch1());
+ __ IncrementCounter(
+ masm_->isolate()->counters()->dont_delete_hint_hit(),
+ 1, frame_->scratch0(), frame_->scratch1());
}
}
// Counter will be decremented in the deferred code. Placed here to avoid
// having it in the instruction stream below where patching will occur.
- __ IncrementCounter(COUNTERS->keyed_load_inline(), 1,
- frame_->scratch0(), frame_->scratch1());
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(),
+ 1, frame_->scratch0(), frame_->scratch1());
// Load the key and receiver from the stack.
bool key_is_known_smi = frame_->KnownSmiAt(0);
// Counter will be decremented in the deferred code. Placed here to avoid
// having it in the instruction stream below where patching will occur.
- __ IncrementCounter(COUNTERS->keyed_store_inline(), 1,
- scratch1, scratch2);
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(),
+ 1, scratch1, scratch2);
// Load the value, key and receiver from the stack.
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(COUNTERS->named_load_full(), 1, r1, r2);
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(COUNTERS->keyed_load_full(), 1, r1, r2);
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
break;
case Code::STORE_IC:
- __ IncrementCounter(COUNTERS->named_store_full(), 1, r1, r2);
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(COUNTERS->keyed_store_full(), 1, r1, r2);
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
default:
break;
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(COUNTERS->named_load_full(), 1, r1, r2);
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(COUNTERS->keyed_load_full(), 1, r1, r2);
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
break;
case Code::STORE_IC:
- __ IncrementCounter(COUNTERS->named_store_full(), 1, r1, r2);
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(COUNTERS->keyed_store_full(), 1, r1, r2);
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
default:
break;
}
// -- r2 : name
// -- lr : return address
// -----------------------------------
+ Isolate* isolate = masm->isolate();
if (id == IC::kCallIC_Miss) {
- __ IncrementCounter(COUNTERS->call_miss(), 1, r3, r4);
+ __ IncrementCounter(isolate->counters()->call_miss(), 1, r3, r4);
} else {
- __ IncrementCounter(COUNTERS->keyed_call_miss(), 1, r3, r4);
+ __ IncrementCounter(isolate->counters()->keyed_call_miss(), 1, r3, r4);
}
// Get the receiver of the function from the stack.
// Call the entry.
__ mov(r0, Operand(2));
- __ mov(r1, Operand(ExternalReference(IC_Utility(id), masm->isolate())));
+ __ mov(r1, Operand(ExternalReference(IC_Utility(id), isolate)));
CEntryStub stub(1);
__ CallStub(&stub);
GenerateFastArrayLoad(
masm, r1, r2, r4, r3, r0, r1, &check_number_dictionary, &slow_load);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1, r0, r3);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1, r0, r3);
__ bind(&do_call);
// receiver in r1 is not used after this point.
__ mov(r0, Operand(r2, ASR, kSmiTagSize));
// r0: untagged index
GenerateNumberDictionaryLoad(masm, &slow_load, r4, r2, r1, r0, r3, r5);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1, r0, r3);
+ __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1, r0, r3);
__ jmp(&do_call);
__ bind(&slow_load);
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1, r0, r3);
+ __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1, r0, r3);
__ EnterInternalFrame();
__ push(r2); // save the key
__ Push(r1, r2); // pass the receiver and the key
__ b(ne, &lookup_monomorphic_cache);
GenerateDictionaryLoad(masm, &slow_load, r0, r2, r1, r3, r4);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1, r0, r3);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1, r0, r3);
__ jmp(&do_call);
__ bind(&lookup_monomorphic_cache);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1, r0, r3);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1, r0, r3);
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
// Fall through on miss.
// - the value loaded is not a function,
// - there is hope that the runtime will create a monomorphic call stub
// that will get fetched next time.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1, r0, r3);
+ __ IncrementCounter(counters->keyed_call_generic_slow(), 1, r0, r3);
GenerateMiss(masm, argc);
__ bind(&index_string);
// -- r0 : receiver
// -- sp[0] : receiver
// -----------------------------------
+ Isolate* isolate = masm->isolate();
- __ IncrementCounter(COUNTERS->load_miss(), 1, r3, r4);
+ __ IncrementCounter(isolate->counters()->load_miss(), 1, r3, r4);
__ mov(r3, r0);
__ Push(r3, r2);
// Perform tail call to the entry.
ExternalReference ref =
- ExternalReference(IC_Utility(kLoadIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(kLoadIC_Miss), isolate);
__ TailCallExternalReference(ref, 2, 1);
}
// -- r0 : key
// -- r1 : receiver
// -----------------------------------
+ Isolate* isolate = masm->isolate();
- __ IncrementCounter(COUNTERS->keyed_load_miss(), 1, r3, r4);
+ __ IncrementCounter(isolate->counters()->keyed_load_miss(), 1, r3, r4);
__ Push(r1, r0);
ExternalReference ref =
- ExternalReference(IC_Utility(kKeyedLoadIC_Miss), masm->isolate());
+ ExternalReference(IC_Utility(kKeyedLoadIC_Miss), isolate);
__ TailCallExternalReference(ref, 2, 1);
}
Register key = r0;
Register receiver = r1;
+ Isolate* isolate = masm->isolate();
+
// Check that the key is a smi.
__ JumpIfNotSmi(key, &check_string);
__ bind(&index_smi);
GenerateFastArrayLoad(
masm, receiver, key, r4, r3, r2, r0, NULL, &slow);
- __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->keyed_load_generic_smi(), 1, r2, r3);
__ Ret();
__ bind(&check_number_dictionary);
// Slow case, key and receiver still in r0 and r1.
__ bind(&slow);
- __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->keyed_load_generic_slow(),
+ 1, r2, r3);
GenerateRuntimeGetProperty(masm);
__ bind(&check_string);
// Load the key (consisting of map and symbol) from the cache and
// check for match.
ExternalReference cache_keys =
- ExternalReference::keyed_lookup_cache_keys(masm->isolate());
+ ExternalReference::keyed_lookup_cache_keys(isolate);
__ mov(r4, Operand(cache_keys));
__ add(r4, r4, Operand(r3, LSL, kPointerSizeLog2 + 1));
__ ldr(r5, MemOperand(r4, kPointerSize, PostIndex)); // Move r4 to symbol.
// r2 : receiver's map
// r3 : lookup cache index
ExternalReference cache_field_offsets =
- ExternalReference::keyed_lookup_cache_field_offsets(masm->isolate());
+ ExternalReference::keyed_lookup_cache_field_offsets(isolate);
__ mov(r4, Operand(cache_field_offsets));
__ ldr(r5, MemOperand(r4, r3, LSL, kPointerSizeLog2));
__ ldrb(r6, FieldMemOperand(r2, Map::kInObjectPropertiesOffset));
__ add(r6, r6, r5); // Index from start of object.
__ sub(r1, r1, Operand(kHeapObjectTag)); // Remove the heap tag.
__ ldr(r0, MemOperand(r1, r6, LSL, kPointerSizeLog2));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
+ 1, r2, r3);
__ Ret();
// Load property array property.
__ ldr(r1, FieldMemOperand(r1, JSObject::kPropertiesOffset));
__ add(r1, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
__ ldr(r0, MemOperand(r1, r5, LSL, kPointerSizeLog2));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->keyed_load_generic_lookup_cache(),
+ 1, r2, r3);
__ Ret();
// Do a quick inline probe of the receiver's dictionary, if it
GenerateGlobalInstanceTypeCheck(masm, r2, &slow);
// Load the property to r0.
GenerateDictionaryLoad(masm, &slow, r3, r0, r0, r2, r4);
- __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1, r2, r3);
+ __ IncrementCounter(isolate->counters()->keyed_load_generic_symbol(),
+ 1, r2, r3);
__ Ret();
__ bind(&index_string);
GenerateStringDictionaryReceiverCheck(masm, r1, r3, r4, r5, &miss);
GenerateDictionaryStore(masm, &miss, r3, r2, r0, r4, r5);
- __ IncrementCounter(COUNTERS->store_normal_hit(), 1, r4, r5);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->store_normal_hit(),
+ 1, r4, r5);
__ Ret();
__ bind(&miss);
- __ IncrementCounter(COUNTERS->store_normal_miss(), 1, r4, r5);
+ __ IncrementCounter(counters->store_normal_miss(), 1, r4, r5);
GenerateMiss(masm);
}
Register scratch0,
Register scratch1) {
ASSERT(name->IsSymbol());
- __ IncrementCounter(COUNTERS->negative_lookups(), 1, scratch0, scratch1);
- __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->negative_lookups(), 1, scratch0, scratch1);
+ __ IncrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
Label done;
}
}
__ bind(&done);
- __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1, scratch0, scratch1);
+ __ DecrementCounter(counters->negative_lookups_miss(), 1, scratch0, scratch1);
}
ASSERT(optimization.is_constant_call());
ASSERT(!lookup->holder()->IsGlobalObject());
+ Counters* counters = masm->isolate()->counters();
+
int depth1 = kInvalidProtoDepth;
int depth2 = kInvalidProtoDepth;
bool can_do_fast_api_call = false;
(depth2 != kInvalidProtoDepth);
}
- __ IncrementCounter(COUNTERS->call_const_interceptor(), 1,
+ __ IncrementCounter(counters->call_const_interceptor(), 1,
scratch1, scratch2);
if (can_do_fast_api_call) {
- __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1,
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1,
scratch1, scratch2);
ReserveSpaceForFastApiCall(masm, scratch1);
}
JSGlobalPropertyCell* cell,
JSFunction* function,
String* name) {
+ Isolate* isolate = masm()->isolate();
+ Heap* heap = isolate->heap();
+ Counters* counters = isolate->counters();
+
ASSERT(optimization.is_simple_api_call());
// Bail out if object is a global object as we don't want to
// repatch it to global receiver.
- if (object->IsGlobalObject()) return HEAP->undefined_value();
- if (cell != NULL) return HEAP->undefined_value();
+ if (object->IsGlobalObject()) return heap->undefined_value();
+ if (cell != NULL) return heap->undefined_value();
int depth = optimization.GetPrototypeDepthOfExpectedType(
JSObject::cast(object), holder);
- if (depth == kInvalidProtoDepth) return HEAP->undefined_value();
+ if (depth == kInvalidProtoDepth) return heap->undefined_value();
Label miss, miss_before_stack_reserved;
__ tst(r1, Operand(kSmiTagMask));
__ b(eq, &miss_before_stack_reserved);
- __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3);
- __ IncrementCounter(COUNTERS->call_const_fast_api(), 1, r0, r3);
+ __ IncrementCounter(counters->call_const(), 1, r0, r3);
+ __ IncrementCounter(counters->call_const_fast_api(), 1, r0, r3);
ReserveSpaceForFastApiCall(masm(), r0);
SharedFunctionInfo* function_info = function->shared();
switch (check) {
case RECEIVER_MAP_CHECK:
- __ IncrementCounter(COUNTERS->call_const(), 1, r0, r3);
+ __ IncrementCounter(masm()->isolate()->counters()->call_const(),
+ 1, r0, r3);
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object), r1, holder, r0, r3, r4, name,
__ ldr(cp, FieldMemOperand(r1, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
- __ IncrementCounter(COUNTERS->call_global_inline(), 1, r3, r4);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_global_inline(), 1, r3, r4);
ASSERT(function->is_compiled());
Handle<Code> code(function->code());
ParameterCount expected(function->shared()->formal_parameter_count());
// Handle call cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1, r1, r3);
+ __ IncrementCounter(counters->call_global_inline_miss(), 1, r1, r3);
MaybeObject* maybe_result = GenerateMissBranch();
if (maybe_result->IsFailure()) return maybe_result;
// Store the value in the cell.
__ str(r0, FieldMemOperand(r4, JSGlobalPropertyCell::kValueOffset));
- __ IncrementCounter(COUNTERS->named_store_global_inline(), 1, r4, r3);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_store_global_inline(), 1, r4, r3);
__ Ret();
// Handle store cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1, r4, r3);
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1, r4, r3);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::StoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET);
}
__ mov(r0, r4);
- __ IncrementCounter(COUNTERS->named_load_global_stub(), 1, r1, r3);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_global_stub(), 1, r1, r3);
__ Ret();
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1, r1, r3);
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1, r1, r3);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
// -- r1 : receiver
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3);
+
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
// Check the key is the cached one.
__ cmp(r0, Operand(Handle<String>(name)));
GenerateLoadStringLength(masm(), r1, r2, r3, &miss, true);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1, r2, r3);
+ __ DecrementCounter(counters->keyed_load_string_length(), 1, r2, r3);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
// Check the name hasn't changed.
__ cmp(r0, Operand(Handle<String>(name)));
GenerateLoadFunctionPrototype(masm(), r1, r2, r3, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1, r2, r3);
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1, r2, r3);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
return GetCode(CALLBACKS, name);
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_field(), 1, r3, r4);
// Check that the name has not changed.
__ cmp(r1, Operand(Handle<String>(name)));
&miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_store_field(), 1, r3, r4);
+ __ DecrementCounter(counters->keyed_store_field(), 1, r3, r4);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::KeyedStoreIC_Miss));
// Remove caller arguments and receiver from the stack and return.
__ add(sp, sp, Operand(r1, LSL, kPointerSizeLog2));
__ add(sp, sp, Operand(kPointerSize));
- __ IncrementCounter(COUNTERS->constructed_objects(), 1, r1, r2);
- __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1, r1, r2);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1, r1, r2);
+ __ IncrementCounter(counters->constructed_objects_stub(), 1, r1, r2);
__ Jump(lr);
// Jump to the generic stub in case the specialized code cannot handle the
// Slow case, key and receiver still in r0 and r1.
__ bind(&slow);
- __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1, r2, r3);
+ __ IncrementCounter(
+ masm()->isolate()->counters()->keyed_load_external_array_slow(),
+ 1, r2, r3);
// ---------- S t a t e --------------
// -- lr : return address
#ifdef DEBUG
byte* begin_pos = pos_;
#endif
- COUNTERS->reloc_info_count()->Increment();
ASSERT(rinfo->pc() - last_pc_ >= 0);
ASSERT(RelocInfo::NUMBER_OF_MODES <= kMaxRelocModes);
// Use unsigned delta-encoding for pc.
void CodeStub::GenerateCode(MacroAssembler* masm) {
// Update the static counter each time a new code stub is generated.
- COUNTERS->code_stubs()->Increment();
+ masm->isolate()->counters()->code_stubs()->Increment();
// Nested stubs are not allowed for leafs.
AllowStubCallsScope allow_scope(masm, AllowsStubCalls());
void CodeStub::RecordCodeGeneration(Code* code, MacroAssembler* masm) {
code->set_major_key(MajorKey());
- PROFILE(ISOLATE, CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
+ Isolate* isolate = masm->isolate();
+ PROFILE(isolate, CodeCreateEvent(Logger::STUB_TAG, code, GetName()));
GDBJIT(AddCode(GDBJITInterface::STUB, GetName(), code));
- COUNTERS->total_stubs_code_size()->Increment(code->instruction_size());
+ Counters* counters = isolate->counters();
+ counters->total_stubs_code_size()->Increment(code->instruction_size());
#ifdef ENABLE_DISASSEMBLER
if (FLAG_print_code_stubs) {
Handle<Code> CodeGenerator::MakeCodeEpilogue(MacroAssembler* masm,
Code::Flags flags,
CompilationInfo* info) {
+ Isolate* isolate = info->isolate();
+
// Allocate and install the code.
CodeDesc desc;
masm->GetCode(&desc);
- Handle<Code> code = FACTORY->NewCode(desc, flags, masm->CodeObject());
+ Handle<Code> code =
+ isolate->factory()->NewCode(desc, flags, masm->CodeObject());
if (!code.is_null()) {
- COUNTERS->total_compiled_code_size()->Increment(code->instruction_size());
+ isolate->counters()->total_compiled_code_size()->Increment(
+ code->instruction_size());
}
return code;
}
Handle<Script> script = info->script();
if (!script->IsUndefined() && !script->source()->IsUndefined()) {
int len = String::cast(script->source())->length();
- COUNTERS->total_old_codegen_source_size()->Increment(len);
+ Counters* counters = info->isolate()->counters();
+ counters->total_old_codegen_source_size()->Increment(len);
}
if (FLAG_trace_codegen) {
PrintF("Classic Compiler - ");
// rest of the function into account to avoid overlap with the
// parsing statistics.
HistogramTimer* rate = info->is_eval()
- ? COUNTERS->compile_eval()
- : COUNTERS->compile();
+ ? info->isolate()->counters()->compile_eval()
+ : info->isolate()->counters()->compile();
HistogramTimerScope timer(rate);
// Compile the code.
ScriptDataImpl* input_pre_data,
Handle<Object> script_data,
NativesFlag natives) {
- Isolate* isolate = Isolate::Current();
+ Isolate* isolate = source->GetIsolate();
int source_length = source->length();
- COUNTERS->total_load_size()->Increment(source_length);
- COUNTERS->total_compile_size()->Increment(source_length);
+ isolate->counters()->total_load_size()->Increment(source_length);
+ isolate->counters()->total_compile_size()->Increment(source_length);
// The VM is in the COMPILER state until exiting this function.
VMState state(isolate, COMPILER);
PcToCodeCache::PcToCodeCacheEntry* PcToCodeCache::GetCacheEntry(Address pc) {
- COUNTERS->pc_to_code()->Increment();
+ isolate_->counters()->pc_to_code()->Increment();
ASSERT(IsPowerOf2(kPcToCodeCacheSize));
uint32_t hash = ComputeIntegerHash(
static_cast<uint32_t>(reinterpret_cast<uintptr_t>(pc)));
uint32_t index = hash & (kPcToCodeCacheSize - 1);
PcToCodeCacheEntry* entry = cache(index);
if (entry->pc == pc) {
- COUNTERS->pc_to_code_cached()->Increment();
+ isolate_->counters()->pc_to_code_cached()->Increment();
ASSERT(entry->code == GcSafeFindCodeForPc(pc));
} else {
// Because this code may be interrupted by a profiling signal that
desc->instr_size = pc_offset();
desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
desc->origin = this;
-
- COUNTERS->reloc_info_size()->Increment(desc->reloc_size);
}
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_2, 1 * kPointerSize)); // 1 ~ receiver
__ push(ecx);
- __ IncrementCounter(COUNTERS->constructed_objects(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->constructed_objects(), 1);
__ ret(0);
}
edi,
kPreallocatedArrayElements,
&prepare_generic_code_call);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->array_function_native(), 1);
__ pop(ebx);
if (construct_call) {
__ pop(edi);
edi,
true,
&prepare_generic_code_call);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->array_function_native(), 1);
__ mov(eax, ebx);
__ pop(ebx);
if (construct_call) {
edi,
false,
&prepare_generic_code_call);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
__ mov(eax, ebx);
__ pop(ebx);
if (construct_call) {
// -- esp[(argc - n) * 4] : arg[n] (zero-based)
// -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
- __ IncrementCounter(COUNTERS->string_ctor_calls(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_ctor_calls(), 1);
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, ecx);
edx, // Scratch 2.
false, // Input is known to be smi?
¬_cached);
- __ IncrementCounter(COUNTERS->string_ctor_cached_number(), 1);
+ __ IncrementCounter(counters->string_ctor_cached_number(), 1);
__ bind(&argument_is_string);
// ----------- S t a t e -------------
// -- ebx : argument converted to string
Condition is_string = masm->IsObjectStringType(eax, ebx, ecx);
__ j(NegateCondition(is_string), &convert_argument);
__ mov(ebx, eax);
- __ IncrementCounter(COUNTERS->string_ctor_string_value(), 1);
+ __ IncrementCounter(counters->string_ctor_string_value(), 1);
__ jmp(&argument_is_string);
// Invoke the conversion builtin and put the result into ebx.
__ bind(&convert_argument);
- __ IncrementCounter(COUNTERS->string_ctor_conversions(), 1);
+ __ IncrementCounter(counters->string_ctor_conversions(), 1);
__ EnterInternalFrame();
__ push(edi); // Preserve the function.
__ push(eax);
// At this point the argument is already a string. Call runtime to
// create a string wrapper.
__ bind(&gc_required);
- __ IncrementCounter(COUNTERS->string_ctor_gc_required(), 1);
+ __ IncrementCounter(counters->string_ctor_gc_required(), 1);
__ EnterInternalFrame();
__ push(ebx);
__ CallRuntime(Runtime::kNewStringWrapper, 1);
// -----------------------------------
Label invoke, dont_adapt_arguments;
- __ IncrementCounter(COUNTERS->arguments_adaptors(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
Label enough, too_few;
__ cmp(eax, Operand(ebx));
Label fill;
__ bind(&fill);
__ inc(ecx);
- __ push(Immediate(FACTORY->undefined_value()));
+ __ push(Immediate(masm->isolate()->factory()->undefined_value()));
__ cmp(ecx, Operand(ebx));
__ j(less, &fill);
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ __ IncrementCounter(
+ masm->isolate()->counters()->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ __ IncrementCounter(
+ masm->isolate()->counters()->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
}
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
Label call_runtime;
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls(), 1);
if (runtime_operands_type_ == BinaryOpIC::UNINIT_OR_SMI) {
Label slow;
void TypeRecordingBinaryOpStub::GenerateGeneric(MacroAssembler* masm) {
Label call_runtime;
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls(), 1);
switch (op_) {
case Token::ADD:
// edx: code
// edi: encoding of subject string (1 if ascii 0 if two_byte);
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(COUNTERS->regexp_entry_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
static const int kRegExpExecuteArguments = 8;
index,
times_twice_pointer_size,
FixedArray::kHeaderSize + kPointerSize));
- __ IncrementCounter(COUNTERS->number_to_string_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->number_to_string_native(), 1);
}
__ test(ecx, Operand(ecx));
__ j(not_zero, &second_not_zero_length);
// Second string is empty, result is first string which is already in eax.
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&second_not_zero_length);
__ mov(ebx, FieldOperand(eax, String::kLengthOffset));
__ j(not_zero, &both_not_zero_length);
// First string is empty, result is second string which is in edx.
__ mov(eax, edx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Both strings are non-empty.
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, ebx, ecx, eax, edx, edi,
&make_two_character_string_no_reload, &make_two_character_string);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Allocate a two character string.
__ movzx_b(ebx, FieldOperand(eax, SeqAsciiString::kHeaderSize));
__ movzx_b(ecx, FieldOperand(edx, SeqAsciiString::kHeaderSize));
__ bind(&make_two_character_string_no_reload);
- __ IncrementCounter(COUNTERS->string_add_make_two_char(), 1);
+ __ IncrementCounter(counters->string_add_make_two_char(), 1);
__ AllocateAsciiString(eax, // Result.
2, // Length.
edi, // Scratch 1.
__ or_(ebx, Operand(ecx));
// Set the characters in the new string.
__ mov_w(FieldOperand(eax, SeqAsciiString::kHeaderSize), ebx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&longer_than_two);
__ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
__ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
__ mov(eax, ecx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
// edx: first char of second argument
// edi: length of second argument
StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, true);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Handle creating a flat two byte result.
// edx: first char of second argument
// edi: length of second argument
StringHelper::GenerateCopyCharacters(masm, ecx, edx, edi, ebx, false);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Just jump to runtime to add the two strings.
// esi: character of sub string start
StringHelper::GenerateCopyCharactersREP(masm, edi, esi, ecx, ebx, true);
__ mov(esi, edx); // Restore esi.
- __ IncrementCounter(COUNTERS->sub_string_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
__ bind(&non_ascii_flat);
__ mov(esi, edx); // Restore esi.
__ bind(&return_eax);
- __ IncrementCounter(COUNTERS->sub_string_native(), 1);
+ __ IncrementCounter(counters->sub_string_native(), 1);
__ ret(3 * kPointerSize);
// Just jump to runtime to create the sub string.
Label result_greater;
Label compare_lengths;
- __ IncrementCounter(COUNTERS->string_compare_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_compare_native(), 1);
// Find minimum length.
NearLabel left_shorter;
STATIC_ASSERT(EQUAL == 0);
STATIC_ASSERT(kSmiTag == 0);
__ Set(eax, Immediate(Smi::FromInt(EQUAL)));
- __ IncrementCounter(COUNTERS->string_compare_native(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->string_compare_native(), 1);
__ ret(2 * kPointerSize);
__ bind(¬_same);
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
clone = frame_->CallStub(&stub, 3);
- __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->cow_arrays_created_stub(), 1);
} else if (node->depth() > 1) {
clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
+ Counters* counters = masm()->isolate()->counters();
if (is_contextual_) {
masm_->mov(is_dont_delete_ ? edx : ecx, -delta_to_patch_site);
- __ IncrementCounter(COUNTERS->named_load_global_inline_miss(), 1);
+ __ IncrementCounter(counters->named_load_global_inline_miss(), 1);
if (is_dont_delete_) {
- __ IncrementCounter(COUNTERS->dont_delete_hint_miss(), 1);
+ __ IncrementCounter(counters->dont_delete_hint_miss(), 1);
}
} else {
masm_->test(eax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);
+ __ IncrementCounter(counters->named_load_inline_miss(), 1);
}
if (!dst_.is(eax)) __ mov(dst_, eax);
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
masm_->test(eax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_inline_miss(), 1);
if (!dst_.is(eax)) __ mov(dst_, eax);
}
void DeferredReferenceSetKeyedValue::Generate() {
- __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_inline_miss(), 1);
// Move value_ to eax, key_ to ecx, and receiver_ to edx.
Register old_value = value_;
int original_height = frame()->height();
#endif
+ Isolate* isolate = masm()->isolate();
+ Factory* factory = isolate->factory();
+ Counters* counters = isolate->counters();
+
bool contextual_load_in_builtin =
is_contextual &&
- (masm()->isolate()->bootstrapper()->IsActive() ||
+ (isolate->bootstrapper()->IsActive() ||
(!info_->closure().is_null() && info_->closure()->IsBuiltin()));
Result result;
// use the double underscore macro that may insert instructions).
// Initially use an invalid map to force a failure.
masm()->cmp(FieldOperand(receiver.reg(), HeapObject::kMapOffset),
- Immediate(FACTORY->null_value()));
+ Immediate(factory->null_value()));
// This branch is always a forwards branch so it's always a fixed size
// which allows the assert below to succeed and patching to work.
deferred->Branch(not_equal);
if (is_contextual) {
    // Load the (initially invalid) cell and get its value.
- masm()->mov(result.reg(), FACTORY->null_value());
+ masm()->mov(result.reg(), factory->null_value());
if (FLAG_debug_code) {
__ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
- FACTORY->global_property_cell_map());
+ factory->global_property_cell_map());
__ Assert(equal, "Uninitialized inlined contextual load");
}
__ mov(result.reg(),
FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
- __ cmp(result.reg(), FACTORY->the_hole_value());
+ __ cmp(result.reg(), factory->the_hole_value());
deferred->Branch(equal);
bool is_dont_delete = false;
if (!info_->closure().is_null()) {
}
deferred->set_is_dont_delete(is_dont_delete);
if (!is_dont_delete) {
- __ cmp(result.reg(), FACTORY->the_hole_value());
+ __ cmp(result.reg(), factory->the_hole_value());
deferred->Branch(equal);
} else if (FLAG_debug_code) {
- __ cmp(result.reg(), FACTORY->the_hole_value());
+ __ cmp(result.reg(), factory->the_hole_value());
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
- __ IncrementCounter(COUNTERS->named_load_global_inline(), 1);
+ __ IncrementCounter(counters->named_load_global_inline(), 1);
if (is_dont_delete) {
- __ IncrementCounter(COUNTERS->dont_delete_hint_hit(), 1);
+ __ IncrementCounter(counters->dont_delete_hint_hit(), 1);
}
} else {
// The initial (invalid) offset has to be large enough to force a 32-bit
// kMaxInt (minus kHeapObjectTag).
int offset = kMaxInt;
masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
- __ IncrementCounter(COUNTERS->named_load_inline(), 1);
+ __ IncrementCounter(counters->named_load_inline(), 1);
}
deferred->BindExit();
result = elements;
__ cmp(Operand(result.reg()), Immediate(FACTORY->the_hole_value()));
deferred->Branch(equal);
- __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_load_inline(), 1);
deferred->BindExit();
} else {
// Store the value.
__ mov(FixedArrayElementOperand(tmp.reg(), key.reg()), result.reg());
- __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);
+ __ IncrementCounter(masm_->isolate()->counters()->keyed_store_inline(), 1);
deferred->BindExit();
} else {
__ test(Operand(src), Immediate(0x0F));
__ j(not_zero, &unaligned_source);
{
- __ IncrementCounter(COUNTERS->memcopy_aligned(), 1);
+ __ IncrementCounter(masm.isolate()->counters()->memcopy_aligned(), 1);
// Copy loop for aligned source and destination.
__ mov(edx, count);
Register loop_count = ecx;
// Copy loop for unaligned source and aligned destination.
// If source is not aligned, we can't read it as efficiently.
__ bind(&unaligned_source);
- __ IncrementCounter(COUNTERS->memcopy_unaligned(), 1);
+ __ IncrementCounter(masm.isolate()->counters()->memcopy_unaligned(), 1);
__ mov(edx, ecx);
Register loop_count = ecx;
Register count = edx;
}
} else {
- __ IncrementCounter(COUNTERS->memcopy_noxmm(), 1);
+ __ IncrementCounter(masm.isolate()->counters()->memcopy_noxmm(), 1);
// SSE2 not supported. Unlikely to happen in practice.
__ push(edi);
__ push(esi);
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = masm()->isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(COUNTERS->named_load_full(), 1);
+ __ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
+ __ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
- __ IncrementCounter(COUNTERS->named_store_full(), 1);
+ __ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
+ __ IncrementCounter(counters->keyed_store_full(), 1);
default:
break;
}
eax,
NULL,
&slow);
- __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1);
+ Isolate* isolate = masm->isolate();
+ Counters* counters = isolate->counters();
+ __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
__ ret(0);
__ bind(&check_number_dictionary);
// ebx: untagged index
// eax: key
// ecx: elements
- __ CheckMap(ecx, FACTORY->hash_table_map(), &slow, true);
+ __ CheckMap(ecx, isolate->factory()->hash_table_map(), &slow, true);
Label slow_pop_receiver;
// Push receiver on the stack to free up a register for the dictionary
// probing.
// Slow case: jump to runtime.
// edx: receiver
// eax: key
- __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
GenerateRuntimeGetProperty(masm);
__ bind(&check_string);
// cache. Otherwise probe the dictionary.
__ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
__ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
- Immediate(FACTORY->hash_table_map()));
+ Immediate(isolate->factory()->hash_table_map()));
__ j(equal, &probe_dictionary);
// Load the map of the receiver, compute the keyed lookup cache hash
__ movzx_b(ecx, FieldOperand(ebx, Map::kInstanceSizeOffset));
__ add(ecx, Operand(edi));
__ mov(eax, FieldOperand(edx, ecx, times_pointer_size, 0));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Load property array property.
__ mov(eax, FieldOperand(edx, JSObject::kPropertiesOffset));
__ mov(eax, FieldOperand(eax, edi, times_pointer_size,
FixedArray::kHeaderSize));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Do a quick inline probe of the receiver's dictionary, if it
GenerateGlobalInstanceTypeCheck(masm, ecx, &slow);
GenerateDictionaryLoad(masm, &slow, ebx, eax, ecx, edi, eax);
- __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
__ ret(0);
__ bind(&index_string);
// -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
+ Counters* counters = masm->isolate()->counters();
if (id == IC::kCallIC_Miss) {
- __ IncrementCounter(COUNTERS->call_miss(), 1);
+ __ IncrementCounter(counters->call_miss(), 1);
} else {
- __ IncrementCounter(COUNTERS->keyed_call_miss(), 1);
+ __ IncrementCounter(counters->keyed_call_miss(), 1);
}
// Get the receiver of the function from the stack; 1 ~ return address.
GenerateFastArrayLoad(
masm, edx, ecx, eax, edi, &check_number_dictionary, &slow_load);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1);
+ Isolate* isolate = masm->isolate();
+ Counters* counters = isolate->counters();
+ __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
__ bind(&do_call);
// receiver in edx is not used after this point.
// eax: elements
// ecx: smi key
// Check whether the elements is a number dictionary.
- __ CheckMap(eax, FACTORY->hash_table_map(), &slow_load, true);
+ __ CheckMap(eax, isolate->factory()->hash_table_map(), &slow_load, true);
__ mov(ebx, ecx);
__ SmiUntag(ebx);
// ebx: untagged index
// Receiver in edx will be clobbered, need to reload it on miss.
GenerateNumberDictionaryLoad(
masm, &slow_reload_receiver, eax, ecx, ebx, edx, edi, edi);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
__ jmp(&do_call);
__ bind(&slow_reload_receiver);
__ bind(&slow_load);
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
__ EnterInternalFrame();
__ push(ecx); // save the key
__ push(edx); // pass the receiver
masm, edx, eax, Map::kHasNamedInterceptor, &lookup_monomorphic_cache);
__ mov(ebx, FieldOperand(edx, JSObject::kPropertiesOffset));
- __ CheckMap(ebx, FACTORY->hash_table_map(), &lookup_monomorphic_cache, true);
+ __ CheckMap(ebx,
+ isolate->factory()->hash_table_map(),
+ &lookup_monomorphic_cache,
+ true);
GenerateDictionaryLoad(masm, &slow_load, ebx, ecx, eax, edi, edi);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
__ jmp(&do_call);
__ bind(&lookup_monomorphic_cache);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
// Fall through on miss.
// - the value loaded is not a function,
// - there is hope that the runtime will create a monomorphic call stub
// that will get fetched next time.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
GenerateMiss(masm, argc);
__ bind(&index_string);
// -- esp[0] : return address
// -----------------------------------
- __ IncrementCounter(COUNTERS->load_miss(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->load_miss(), 1);
__ pop(ebx);
__ push(eax); // receiver
// -- esp[0] : return address
// -----------------------------------
- __ IncrementCounter(COUNTERS->keyed_load_miss(), 1);
+ __ IncrementCounter(masm->isolate()->counters()->keyed_load_miss(), 1);
__ pop(ebx);
__ push(edx); // receiver
__ push(edx);
GenerateDictionaryStore(masm, &restore_miss, ebx, ecx, eax, edx, edi);
__ Drop(1);
- __ IncrementCounter(COUNTERS->store_normal_hit(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->store_normal_hit(), 1);
__ ret(0);
__ bind(&restore_miss);
__ pop(edx);
__ bind(&miss);
- __ IncrementCounter(COUNTERS->store_normal_miss(), 1);
+ __ IncrementCounter(counters->store_normal_miss(), 1);
GenerateMiss(masm);
}
Register r0,
Register r1) {
ASSERT(name->IsSymbol());
- __ IncrementCounter(COUNTERS->negative_lookups(), 1);
- __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->negative_lookups(), 1);
+ __ IncrementCounter(counters->negative_lookups_miss(), 1);
Label done;
__ mov(r0, FieldOperand(receiver, HeapObject::kMapOffset));
}
__ bind(&done);
- __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
+ __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
(depth2 != kInvalidProtoDepth);
}
- __ IncrementCounter(COUNTERS->call_const_interceptor(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->call_const_interceptor(), 1);
if (can_do_fast_api_call) {
- __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1);
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
ReserveSpaceForFastApiCall(masm, scratch1);
}
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &miss_before_stack_reserved, not_taken);
- __ IncrementCounter(COUNTERS->call_const(), 1);
- __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_const(), 1);
+ __ IncrementCounter(counters->call_const_fast_api(), 1);
// Allocate space for v8::Arguments implicit values. Must be initialized
// before calling any runtime function.
SharedFunctionInfo* function_info = function->shared();
switch (check) {
case RECEIVER_MAP_CHECK:
- __ IncrementCounter(COUNTERS->call_const(), 1);
+ __ IncrementCounter(masm()->isolate()->counters()->call_const(), 1);
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object), edx, holder,
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
- __ IncrementCounter(COUNTERS->call_global_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_global_inline(), 1);
ASSERT(function->is_compiled());
ParameterCount expected(function->shared()->formal_parameter_count());
if (V8::UseCrankshaft()) {
// Handle call cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1);
+ __ IncrementCounter(counters->call_global_inline_miss(), 1);
MaybeObject* maybe_result = GenerateMissBranch();
if (maybe_result->IsFailure()) return maybe_result;
__ mov(cell_operand, eax);
// Return the value (register eax).
- __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_store_global_inline(), 1);
__ ret(0);
// Handle store cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::StoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_field(), 1);
// Check that the name has not changed.
__ cmp(Operand(ecx), Immediate(Handle<String>(name)));
// Handle store cache miss.
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
+ __ DecrementCounter(counters->keyed_store_field(), 1);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::KeyedStoreIC_Miss));
__ jmp(ic, RelocInfo::CODE_TARGET);
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
- __ IncrementCounter(COUNTERS->named_load_global_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_global_stub(), 1);
__ mov(eax, ebx);
__ ret(0);
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_field(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
GenerateLoadField(receiver, holder, edx, ebx, ecx, edi, index, name, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
+ __ DecrementCounter(counters->keyed_load_field(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_callback(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
+ __ DecrementCounter(counters->keyed_load_callback(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_constant_function(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_constant_function(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
GenerateLoadConstant(receiver, holder, edx, ebx, ecx, edi,
value, name, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
+ __ DecrementCounter(counters->keyed_load_constant_function(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_interceptor(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_interceptor(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
name,
&miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
+ __ DecrementCounter(counters->keyed_load_interceptor(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_array_length(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
GenerateLoadArrayLength(masm(), edx, ecx, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
+ __ DecrementCounter(counters->keyed_load_array_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_string_length(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
GenerateLoadStringLength(masm(), edx, ecx, ebx, &miss, true);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
+ __ DecrementCounter(counters->keyed_load_string_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
// Check that the name has not changed.
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
GenerateLoadFunctionPrototype(masm(), edx, ecx, ebx, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
__ pop(ecx);
__ lea(esp, Operand(esp, ebx, times_pointer_size, 1 * kPointerSize));
__ push(ecx);
- __ IncrementCounter(COUNTERS->constructed_objects(), 1);
- __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1);
+ __ IncrementCounter(counters->constructed_objects_stub(), 1);
__ ret(0);
// Jump to the generic stub in case the specialized code cannot handle the
// Slow case: Jump to runtime.
__ bind(&slow);
- __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
// ----------- S t a t e -------------
// -- eax : key
// -- edx : receiver
// the directionality of the block. Compute: an entry frame for the
// block.
- COUNTERS->compute_entry_frame()->Increment();
+ Isolate::Current()->counters()->compute_entry_frame()->Increment();
#ifdef DEBUG
if (Isolate::Current()->jump_target_compiling_deferred_code()) {
ASSERT(reaching_frames_.length() > 1);
//
class SlidingStateWindow {
public:
- SlidingStateWindow();
+ explicit SlidingStateWindow(Isolate* isolate);
~SlidingStateWindow();
void AddState(StateTag state);
private:
static const int kBufferSize = 256;
+ Counters* counters_;
int current_index_;
bool is_full_;
byte buffer_[kBufferSize];
void IncrementStateCounter(StateTag state) {
- COUNTERS->state_counters(state)->Increment();
+ counters_->state_counters(state)->Increment();
}
void DecrementStateCounter(StateTag state) {
- COUNTERS->state_counters(state)->Decrement();
+ counters_->state_counters(state)->Decrement();
}
};
//
// SlidingStateWindow implementation.
//
-SlidingStateWindow::SlidingStateWindow(): current_index_(0), is_full_(false) {
+SlidingStateWindow::SlidingStateWindow(Isolate* isolate)
+ : counters_(isolate->counters()), current_index_(0), is_full_(false) {
for (int i = 0; i < kBufferSize; i++) {
buffer_[i] = static_cast<byte>(OTHER);
}
- LOGGER->ticker_->SetWindow(this);
+ isolate->logger()->ticker_->SetWindow(this);
}
ticker_ = new Ticker(Isolate::Current(), kSamplingIntervalMs);
+ Isolate* isolate = Isolate::Current();
if (FLAG_sliding_state_window && sliding_state_window_ == NULL) {
- sliding_state_window_ = new SlidingStateWindow();
+ sliding_state_window_ = new SlidingStateWindow(isolate);
}
bool start_logging = FLAG_log || FLAG_log_runtime || FLAG_log_api
}
if (FLAG_prof) {
- profiler_ = new Profiler(Isolate::Current());
+ profiler_ = new Profiler(isolate);
if (!FLAG_prof_auto) {
profiler_->pause();
} else {
// Otherwise, if the sliding state window computation has not been
// started we do it now.
if (sliding_state_window_ == NULL) {
- sliding_state_window_ = new SlidingStateWindow();
+ sliding_state_window_ = new SlidingStateWindow(Isolate::Current());
}
#endif
}
MaybeObject* NormalizedMapCache::Get(JSObject* obj,
PropertyNormalizationMode mode) {
+ Isolate* isolate = obj->GetIsolate();
Map* fast = obj->map();
int index = Hash(fast) % kEntries;
Object* result = get(index);
if (!maybe_result->ToObject(&result)) return maybe_result;
}
set(index, result);
- COUNTERS->normalized_maps()->Increment();
+ isolate->counters()->normalized_maps()->Increment();
return result;
}
UNIQUE_NORMALIZED_MAP);
if (!maybe_obj->ToObject(&obj)) return maybe_obj;
}
- COUNTERS->normalized_maps()->Increment();
+ GetIsolate()->counters()->normalized_maps()->Increment();
set_map(Map::cast(obj));
}
symbol_cache_.at(symbol_id) = result;
return result;
}
- COUNTERS->total_preparse_symbols_skipped()->Increment();
+ isolate()->counters()->total_preparse_symbols_skipped()->Increment();
return result;
}
StrictModeFlag strict_mode) {
CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
- HistogramTimerScope timer(COUNTERS->parse());
- COUNTERS->total_parse_size()->Increment(source->length());
+ HistogramTimerScope timer(isolate()->counters()->parse());
+ isolate()->counters()->total_parse_size()->Increment(source->length());
fni_ = new FuncNameInferrer();
// Initialize parser state.
FunctionLiteral* Parser::ParseLazy(CompilationInfo* info) {
CompilationZoneScope zone_scope(DONT_DELETE_ON_EXIT);
- HistogramTimerScope timer(COUNTERS->parse_lazy());
+ HistogramTimerScope timer(isolate()->counters()->parse_lazy());
Handle<String> source(String::cast(script_->source()));
- COUNTERS->total_parse_size()->Increment(source->length());
+ isolate()->counters()->total_parse_size()->Increment(source->length());
Handle<SharedFunctionInfo> shared_info = info->shared_info();
// Initialize parser state.
// End position greater than end of stream is safe, and hard to check.
ReportInvalidPreparseData(name, CHECK_OK);
}
- COUNTERS->total_preparse_skipped()->Increment(
+ isolate()->counters()->total_preparse_skipped()->Increment(
end_pos - function_block_pos);
// Seek to position just before terminal '}'.
scanner().SeekForward(end_pos - 1);
}
if (JSObject::cast(*boilerplate)->elements()->map() ==
isolate->heap()->fixed_cow_array_map()) {
- COUNTERS->cow_arrays_created_runtime()->Increment();
+ isolate->counters()->cow_arrays_created_runtime()->Increment();
}
return isolate->heap()->CopyJSObject(JSObject::cast(*boilerplate));
}
void Runtime::PerformGC(Object* result) {
+ Isolate* isolate = Isolate::Current();
Failure* failure = Failure::cast(result);
if (failure->IsRetryAfterGC()) {
// Try to do a garbage collection; ignore it if it fails. The C
// entry stub will throw an out-of-memory exception in that case.
- HEAP->CollectGarbage(failure->allocation_space());
+ isolate->heap()->CollectGarbage(failure->allocation_space());
} else {
// Handle last resort GC and make sure to allow future allocations
// to grow the heap without causing GCs (if possible).
- COUNTERS->gc_last_resort_from_js()->Increment();
- HEAP->CollectAllGarbage(false);
+ isolate->counters()->gc_last_resort_from_js()->Increment();
+ isolate->heap()->CollectAllGarbage(false);
}
}
#ifdef DEBUG
ZapBlock(reinterpret_cast<Address>(mem), alloced);
#endif
- COUNTERS->memory_allocated()->Increment(alloced);
+ isolate_->counters()->memory_allocated()->Increment(alloced);
return mem;
}
} else {
OS::Free(mem, length);
}
- COUNTERS->memory_allocated()->Decrement(static_cast<int>(length));
+ isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(length));
size_ -= static_cast<int>(length);
if (executable == EXECUTABLE) size_executable_ -= static_cast<int>(length);
#ifdef DEBUG
ZapBlock(start, size);
#endif
- COUNTERS->memory_allocated()->Increment(static_cast<int>(size));
+ isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
// So long as we correctly overestimated the number of chunks we should not
// run out of chunk ids.
#ifdef DEBUG
ZapBlock(start, size);
#endif
- COUNTERS->memory_allocated()->Increment(static_cast<int>(size));
+ isolate_->counters()->memory_allocated()->Increment(static_cast<int>(size));
return true;
}
ASSERT(InInitialChunk(start + size - 1));
if (!initial_chunk_->Uncommit(start, size)) return false;
- COUNTERS->memory_allocated()->Decrement(static_cast<int>(size));
+ isolate_->counters()->memory_allocated()->Decrement(static_cast<int>(size));
return true;
}
// TODO(1240712): VirtualMemory::Uncommit has a return value which
// is ignored here.
initial_chunk_->Uncommit(c.address(), c.size());
- COUNTERS->memory_allocated()->Decrement(static_cast<int>(c.size()));
+ Counters* counters = isolate_->counters();
+ counters->memory_allocated()->Decrement(static_cast<int>(c.size()));
} else {
LOG(isolate_, DeleteEvent("PagedChunk", c.address()));
ObjectSpace space = static_cast<ObjectSpace>(1 << c.owner_identity());
LargeObjectChunk* chunk = reinterpret_cast<LargeObjectChunk*>(mem);
chunk->size_ = size;
Page* page = Page::FromAddress(RoundUp(chunk->address(), Page::kPageSize));
- page->heap_ = Isolate::Current()->heap();
+ page->heap_ = isolate->heap();
return chunk;
}
GetCodeWithFlags(flags, "CompileCallInitialize");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- COUNTERS->call_initialize_stubs()->Increment();
+ isolate()->counters()->call_initialize_stubs()->Increment();
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
GetCodeWithFlags(flags, "CompileCallPreMonomorphic");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- COUNTERS->call_premonomorphic_stubs()->Increment();
+ isolate()->counters()->call_premonomorphic_stubs()->Increment();
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
{ MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallNormal");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- COUNTERS->call_normal_stubs()->Increment();
+ isolate()->counters()->call_normal_stubs()->Increment();
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
GetCodeWithFlags(flags, "CompileCallMegamorphic");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- COUNTERS->call_megamorphic_stubs()->Increment();
+ isolate()->counters()->call_megamorphic_stubs()->Increment();
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
{ MaybeObject* maybe_result = GetCodeWithFlags(flags, "CompileCallMiss");
if (!maybe_result->ToObject(&result)) return maybe_result;
}
- COUNTERS->call_megamorphic_stubs()->Increment();
+ isolate()->counters()->call_megamorphic_stubs()->Increment();
Code* code = Code::cast(result);
USE(code);
PROFILE(isolate(),
SC(memcopy_noxmm, V8.MemCopyNoXMM) \
SC(enum_cache_hits, V8.EnumCacheHits) \
SC(enum_cache_misses, V8.EnumCacheMisses) \
- SC(reloc_info_count, V8.RelocInfoCount) \
- SC(reloc_info_size, V8.RelocInfoSize) \
SC(zone_segment_bytes, V8.ZoneSegmentBytes) \
SC(compute_entry_frame, V8.ComputeEntryFrame) \
SC(generic_binary_stub_calls, V8.GenericBinaryStubCalls) \
DISALLOW_IMPLICIT_CONSTRUCTORS(Counters);
};
-#define COUNTERS Isolate::Current()->counters()
-
} } // namespace v8::internal
#endif // V8_V8_COUNTERS_H_
desc->reloc_size =
static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
desc->origin = this;
-
- COUNTERS->reloc_info_size()->Increment(desc->reloc_size);
}
SmiIndex index = masm->SmiToIndex(rbx, rbx, kPointerSizeLog2);
__ lea(rsp, Operand(rsp, index.reg, index.scale, 1 * kPointerSize));
__ push(rcx);
- __ IncrementCounter(COUNTERS->constructed_objects(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1);
__ ret(0);
}
r8,
kPreallocatedArrayElements,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->array_function_native(), 1);
__ movq(rax, rbx);
__ ret(kPointerSize);
r9,
true,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
__ movq(rax, rbx);
__ ret(2 * kPointerSize);
r9,
false,
call_generic_code);
- __ IncrementCounter(COUNTERS->array_function_native(), 1);
+ __ IncrementCounter(counters->array_function_native(), 1);
// rax: argc
// rbx: JSArray
// -----------------------------------
Label invoke, dont_adapt_arguments;
- __ IncrementCounter(COUNTERS->arguments_adaptors(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->arguments_adaptors(), 1);
Label enough, too_few;
__ cmpq(rax, rbx);
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
}
// Update flags to indicate that arguments are in registers.
SetArgsInRegisters();
- __ IncrementCounter(COUNTERS->generic_binary_stub_calls_regs(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->generic_binary_stub_calls_regs(), 1);
}
// Call the stub.
// rcx: encoding of subject string (1 if ascii 0 if two_byte);
// r11: code
// All checks done. Now push arguments for native regexp code.
- __ IncrementCounter(COUNTERS->regexp_entry_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->regexp_entry_native(), 1);
// Isolates: note we add an additional parameter here (isolate pointer).
static const int kRegExpExecuteArguments = 8;
index,
times_1,
FixedArray::kHeaderSize + kPointerSize));
- __ IncrementCounter(COUNTERS->number_to_string_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->number_to_string_native(), 1);
}
__ SmiTest(rcx);
__ j(not_zero, &second_not_zero_length);
// Second string is empty, result is first string which is already in rax.
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&second_not_zero_length);
__ movq(rbx, FieldOperand(rax, String::kLengthOffset));
__ j(not_zero, &both_not_zero_length);
// First string is empty, result is second string which is in rdx.
__ movq(rax, rdx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Both strings are non-empty.
Label make_two_character_string, make_flat_ascii_string;
StringHelper::GenerateTwoCharacterSymbolTableProbe(
masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&make_two_character_string);
__ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
__ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
__ movq(rax, rcx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
__ bind(&non_ascii);
// At least one of the strings is two-byte. Check whether it happens
// rdi: length of second argument
StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, true);
__ movq(rax, rbx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Handle creating a flat two byte result.
// rdi: length of second argument
StringHelper::GenerateCopyCharacters(masm, rcx, rdx, rdi, false);
__ movq(rax, rbx);
- __ IncrementCounter(COUNTERS->string_add_native(), 1);
+ __ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
// Just jump to runtime to add the two strings.
// rsi: character of sub string start
StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
__ movq(rsi, rdx); // Restore rsi.
- __ IncrementCounter(COUNTERS->sub_string_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
__ bind(&non_ascii_flat);
__ movq(rsi, rdx); // Restore esi.
__ bind(&return_rax);
- __ IncrementCounter(COUNTERS->sub_string_native(), 1);
+ __ IncrementCounter(counters->sub_string_native(), 1);
__ ret(kArgumentsSize);
// Just jump to runtime to create the sub string.
__ cmpq(rdx, rax);
  __ j(not_equal, &not_same);
__ Move(rax, Smi::FromInt(EQUAL));
- __ IncrementCounter(COUNTERS->string_compare_native(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->string_compare_native(), 1);
__ ret(2 * kPointerSize);
  __ bind(&not_same);
__ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
// Inline comparison of ascii strings.
- __ IncrementCounter(COUNTERS->string_compare_native(), 1);
+ __ IncrementCounter(counters->string_compare_native(), 1);
// Drop arguments from the stack
__ pop(rcx);
__ addq(rsp, Immediate(2 * kPointerSize));
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
clone = frame_->CallStub(&stub, 3);
- __ IncrementCounter(COUNTERS->cow_arrays_created_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->cow_arrays_created_stub(), 1);
} else if (node->depth() > 1) {
clone = frame_->CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(COUNTERS->named_load_inline_miss(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_inline_miss(), 1);
if (!dst_.is(rax)) __ movq(dst_, rax);
}
// 7-byte NOP with non-zero immediate (0f 1f 80 xxxxxxxx) which won't
// be generated normally.
masm_->testl(rax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(COUNTERS->keyed_load_inline_miss(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_inline_miss(), 1);
if (!dst_.is(rax)) __ movq(dst_, rax);
}
void DeferredReferenceSetKeyedValue::Generate() {
- __ IncrementCounter(COUNTERS->keyed_store_inline_miss(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_inline_miss(), 1);
// Move value, receiver, and key to registers rax, rdx, and rcx, as
// the IC stub expects.
// Move value to rax, using xchg if the receiver or key is in rax.
int offset = kMaxInt;
masm()->movq(result.reg(), FieldOperand(receiver.reg(), offset));
- __ IncrementCounter(COUNTERS->named_load_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_inline(), 1);
deferred->BindExit();
}
ASSERT(frame()->height() == original_height - 1);
result = elements;
__ CompareRoot(result.reg(), Heap::kTheHoleValueRootIndex);
deferred->Branch(equal);
- __ IncrementCounter(COUNTERS->keyed_load_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_inline(), 1);
deferred->BindExit();
} else {
index.scale,
FixedArray::kHeaderSize),
result.reg());
- __ IncrementCounter(COUNTERS->keyed_store_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_inline(), 1);
deferred->BindExit();
} else {
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(COUNTERS->named_load_full(), 1);
+ __ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
+ __ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
- __ IncrementCounter(COUNTERS->named_store_full(), 1);
+ __ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
+ __ IncrementCounter(counters->keyed_store_full(), 1);
default:
break;
}
void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(COUNTERS->named_load_full(), 1);
+ __ IncrementCounter(counters->named_load_full(), 1);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(COUNTERS->keyed_load_full(), 1);
+ __ IncrementCounter(counters->keyed_load_full(), 1);
break;
case Code::STORE_IC:
- __ IncrementCounter(COUNTERS->named_store_full(), 1);
+ __ IncrementCounter(counters->named_store_full(), 1);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(COUNTERS->keyed_store_full(), 1);
+ __ IncrementCounter(counters->keyed_store_full(), 1);
default:
break;
}
rax,
NULL,
&slow);
- __ IncrementCounter(COUNTERS->keyed_load_generic_smi(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_generic_smi(), 1);
__ ret(0);
__ bind(&check_number_dictionary);
// Slow case: Jump to runtime.
// rdx: receiver
// rax: key
- __ IncrementCounter(COUNTERS->keyed_load_generic_slow(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_slow(), 1);
GenerateRuntimeGetProperty(masm);
__ bind(&check_string);
__ movzxbq(rcx, FieldOperand(rbx, Map::kInstanceSizeOffset));
__ addq(rcx, rdi);
__ movq(rax, FieldOperand(rdx, rcx, times_pointer_size, 0));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Load property array property.
__ movq(rax, FieldOperand(rdx, JSObject::kPropertiesOffset));
__ movq(rax, FieldOperand(rax, rdi, times_pointer_size,
FixedArray::kHeaderSize));
- __ IncrementCounter(COUNTERS->keyed_load_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_lookup_cache(), 1);
__ ret(0);
// Do a quick inline probe of the receiver's dictionary, if it
GenerateGlobalInstanceTypeCheck(masm, rcx, &slow);
GenerateDictionaryLoad(masm, &slow, rbx, rax, rcx, rdi, rax);
- __ IncrementCounter(COUNTERS->keyed_load_generic_symbol(), 1);
+ __ IncrementCounter(counters->keyed_load_generic_symbol(), 1);
__ ret(0);
__ bind(&index_string);
// rsp[(argc + 1) * 8] : argument 0 = receiver
// -----------------------------------
+ Counters* counters = masm->isolate()->counters();
if (id == IC::kCallIC_Miss) {
- __ IncrementCounter(COUNTERS->call_miss(), 1);
+ __ IncrementCounter(counters->call_miss(), 1);
} else {
- __ IncrementCounter(COUNTERS->keyed_call_miss(), 1);
+ __ IncrementCounter(counters->keyed_call_miss(), 1);
}
// Get the receiver of the function from the stack; 1 ~ return address.
GenerateFastArrayLoad(
masm, rdx, rcx, rax, rbx, rdi, &check_number_dictionary, &slow_load);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_fast(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->keyed_call_generic_smi_fast(), 1);
__ bind(&do_call);
// receiver in rdx is not used after this point.
__ SmiToInteger32(rbx, rcx);
// ebx: untagged index
GenerateNumberDictionaryLoad(masm, &slow_load, rax, rcx, rbx, r9, rdi, rdi);
- __ IncrementCounter(COUNTERS->keyed_call_generic_smi_dict(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_smi_dict(), 1);
__ jmp(&do_call);
__ bind(&slow_load);
// This branch is taken when calling KeyedCallIC_Miss is neither required
// nor beneficial.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow_load(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_slow_load(), 1);
__ EnterInternalFrame();
__ push(rcx); // save the key
__ push(rdx); // pass the receiver
__ j(not_equal, &lookup_monomorphic_cache);
GenerateDictionaryLoad(masm, &slow_load, rbx, rcx, rax, rdi, rdi);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_dict(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_dict(), 1);
__ jmp(&do_call);
__ bind(&lookup_monomorphic_cache);
- __ IncrementCounter(COUNTERS->keyed_call_generic_lookup_cache(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_lookup_cache(), 1);
GenerateMonomorphicCacheProbe(masm, argc, Code::KEYED_CALL_IC);
// Fall through on miss.
// - the value loaded is not a function,
// - there is hope that the runtime will create a monomorphic call stub
// that will get fetched next time.
- __ IncrementCounter(COUNTERS->keyed_call_generic_slow(), 1);
+ __ IncrementCounter(counters->keyed_call_generic_slow(), 1);
GenerateMiss(masm, argc);
__ bind(&index_string);
// -- rsp[0] : return address
// -----------------------------------
- __ IncrementCounter(COUNTERS->load_miss(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->load_miss(), 1);
__ pop(rbx);
__ push(rax); // receiver
// -- rsp[0] : return address
// -----------------------------------
- __ IncrementCounter(COUNTERS->keyed_load_miss(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_miss(), 1);
__ pop(rbx);
__ push(rdx); // receiver
GenerateStringDictionaryReceiverCheck(masm, rdx, rbx, rdi, &miss);
GenerateDictionaryStore(masm, &miss, rbx, rcx, rax, r8, r9);
- __ IncrementCounter(COUNTERS->store_normal_hit(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->store_normal_hit(), 1);
__ ret(0);
__ bind(&miss);
- __ IncrementCounter(COUNTERS->store_normal_miss(), 1);
+ __ IncrementCounter(counters->store_normal_miss(), 1);
GenerateMiss(masm);
}
Register r0,
Register r1) {
ASSERT(name->IsSymbol());
- __ IncrementCounter(COUNTERS->negative_lookups(), 1);
- __ IncrementCounter(COUNTERS->negative_lookups_miss(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->negative_lookups(), 1);
+ __ IncrementCounter(counters->negative_lookups_miss(), 1);
Label done;
__ movq(r0, FieldOperand(receiver, HeapObject::kMapOffset));
}
__ bind(&done);
- __ DecrementCounter(COUNTERS->negative_lookups_miss(), 1);
+ __ DecrementCounter(counters->negative_lookups_miss(), 1);
}
(depth2 != kInvalidProtoDepth);
}
- __ IncrementCounter(COUNTERS->call_const_interceptor(), 1);
+ Counters* counters = masm->isolate()->counters();
+ __ IncrementCounter(counters->call_const_interceptor(), 1);
if (can_do_fast_api_call) {
- __ IncrementCounter(COUNTERS->call_const_interceptor_fast_api(), 1);
+ __ IncrementCounter(counters->call_const_interceptor_fast_api(), 1);
ReserveSpaceForFastApiCall(masm, scratch1);
}
// Check that the receiver isn't a smi.
__ JumpIfSmi(rdx, &miss_before_stack_reserved);
- __ IncrementCounter(COUNTERS->call_const(), 1);
- __ IncrementCounter(COUNTERS->call_const_fast_api(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_const(), 1);
+ __ IncrementCounter(counters->call_const_fast_api(), 1);
// Allocate space for v8::Arguments implicit values. Must be initialized
// before calling any runtime function.
// unless we're doing a receiver map check.
ASSERT(!object->IsGlobalObject() || check == RECEIVER_MAP_CHECK);
+ Counters* counters = masm()->isolate()->counters();
SharedFunctionInfo* function_info = function->shared();
switch (check) {
case RECEIVER_MAP_CHECK:
- __ IncrementCounter(COUNTERS->call_const(), 1);
+ __ IncrementCounter(counters->call_const(), 1);
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object), rdx, holder,
__ movq(rsi, FieldOperand(rdi, JSFunction::kContextOffset));
// Jump to the cached code (tail call).
- __ IncrementCounter(COUNTERS->call_global_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->call_global_inline(), 1);
ASSERT(function->is_compiled());
ParameterCount expected(function->shared()->formal_parameter_count());
if (V8::UseCrankshaft()) {
}
// Handle call cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->call_global_inline_miss(), 1);
+ __ IncrementCounter(counters->call_global_inline_miss(), 1);
MaybeObject* maybe_result = GenerateMissBranch();
if (maybe_result->IsFailure()) return maybe_result;
__ movq(FieldOperand(rbx, JSGlobalPropertyCell::kValueOffset), rax);
// Return the value (register rax).
- __ IncrementCounter(COUNTERS->named_store_global_inline(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_store_global_inline(), 1);
__ ret(0);
// Handle store cache miss.
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_store_global_inline_miss(), 1);
+ __ IncrementCounter(counters->named_store_global_inline_miss(), 1);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::StoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET);
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_store_field(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_store_field(), 1);
// Check that the name has not changed.
__ Cmp(rcx, Handle<String>(name));
// Handle store cache miss.
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_store_field(), 1);
+ __ DecrementCounter(counters->keyed_store_field(), 1);
Handle<Code> ic(Isolate::Current()->builtins()->builtin(
Builtins::KeyedStoreIC_Miss));
__ Jump(ic, RelocInfo::CODE_TARGET);
__ Check(not_equal, "DontDelete cells can't contain the hole");
}
- __ IncrementCounter(COUNTERS->named_load_global_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->named_load_global_stub(), 1);
__ movq(rax, rbx);
__ ret(0);
__ bind(&miss);
- __ IncrementCounter(COUNTERS->named_load_global_stub_miss(), 1);
+ __ IncrementCounter(counters->named_load_global_stub_miss(), 1);
GenerateLoadMiss(masm(), Code::LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_field(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_field(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
GenerateLoadField(receiver, holder, rdx, rbx, rcx, rdi, index, name, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_field(), 1);
+ __ DecrementCounter(counters->keyed_load_field(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_callback(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_callback(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_callback(), 1);
+ __ DecrementCounter(counters->keyed_load_callback(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_constant_function(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_constant_function(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
GenerateLoadConstant(receiver, holder, rdx, rbx, rcx, rdi,
value, name, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_constant_function(), 1);
+ __ DecrementCounter(counters->keyed_load_constant_function(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_interceptor(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_interceptor(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
name,
&miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_interceptor(), 1);
+ __ DecrementCounter(counters->keyed_load_interceptor(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_array_length(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_array_length(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
GenerateLoadArrayLength(masm(), rdx, rcx, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_array_length(), 1);
+ __ DecrementCounter(counters->keyed_load_array_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_string_length(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_string_length(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
GenerateLoadStringLength(masm(), rdx, rcx, rbx, &miss, true);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_string_length(), 1);
+ __ DecrementCounter(counters->keyed_load_string_length(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
// -----------------------------------
Label miss;
- __ IncrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_function_prototype(), 1);
// Check that the name has not changed.
__ Cmp(rax, Handle<String>(name));
GenerateLoadFunctionPrototype(masm(), rdx, rcx, rbx, &miss);
__ bind(&miss);
- __ DecrementCounter(COUNTERS->keyed_load_function_prototype(), 1);
+ __ DecrementCounter(counters->keyed_load_function_prototype(), 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
// Return the generated code.
__ pop(rcx);
__ lea(rsp, Operand(rsp, rbx, times_pointer_size, 1 * kPointerSize));
__ push(rcx);
- __ IncrementCounter(COUNTERS->constructed_objects(), 1);
- __ IncrementCounter(COUNTERS->constructed_objects_stub(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->constructed_objects(), 1);
+ __ IncrementCounter(counters->constructed_objects_stub(), 1);
__ ret(0);
// Jump to the generic stub in case the specialized code cannot handle the
// Slow case: Jump to runtime.
__ bind(&slow);
- __ IncrementCounter(COUNTERS->keyed_load_external_array_slow(), 1);
+ Counters* counters = masm()->isolate()->counters();
+ __ IncrementCounter(counters->keyed_load_external_array_slow(), 1);
// ----------- S t a t e -------------
// -- rax : key