__ AllocateAsciiConsString(r7, r6, r4, r5, &call_runtime);
__ bind(&allocated);
// Fill the fields of the cons string.
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ mov(r4, Operand(high_promotion_mode));
+ __ ldr(r4, MemOperand(r4, 0));
+ __ cmp(r4, Operand::Zero());
+ __ b(eq, &skip_write_barrier);
+
+ __ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
+ __ RecordWriteField(r7,
+ ConsString::kFirstOffset,
+ r0,
+ r4,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+ __ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+ __ RecordWriteField(r7,
+ ConsString::kSecondOffset,
+ r1,
+ r4,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
__ str(r0, FieldMemOperand(r7, ConsString::kFirstOffset));
__ str(r1, FieldMemOperand(r7, ConsString::kSecondOffset));
+
+ __ bind(&after_writing);
+
__ mov(r0, Operand(r7));
__ IncrementCounter(counters->string_add_native(), 1, r2, r3);
__ add(sp, sp, Operand(2 * kPointerSize));
{ REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
// FastNewClosureStub::Generate
{ REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
+ // StringAddStub::Generate
+ { REG(r7), REG(r1), REG(r4), EMIT_REMEMBERED_SET },
+ { REG(r7), REG(r0), REG(r4), EMIT_REMEMBERED_SET },
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
Register scratch1,
Register scratch2,
Label* gc_required) {
-  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
-           TAG_OBJECT);
+  Label allocate_new_space, install_map;
+  AllocationFlags flags = TAG_OBJECT;
+
+  // Pretenure cons strings to old pointer space while the heap reports high
+  // new-space promotion rates; otherwise allocate in new space as before.
+  ExternalReference high_promotion_mode = ExternalReference::
+      new_space_high_promotion_mode_active_address(isolate());
+  mov(scratch1, Operand(high_promotion_mode));
+  // Load the flag through scratch1 (which now holds its address).  Do NOT
+  // hard-code r4 here: callers may pass a different scratch register, and
+  // this function must not clobber registers outside its scratch set.
+  ldr(scratch1, MemOperand(scratch1, 0));
+  cmp(scratch1, Operand::Zero());
+  b(eq, &allocate_new_space);
+
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+  jmp(&install_map);
+
+  bind(&allocate_new_space);
+  Allocate(ConsString::kSize,
+           result,
+           scratch1,
+           scratch2,
+           gc_required,
+           flags);
+
+  bind(&install_map);
InitializeNewString(result,
length,
}
+// External reference to the heap flag recording whether new-space
+// high-promotion mode is active, so generated code (stubs and
+// macro-assembler helpers) can load and test it at runtime.
+ExternalReference ExternalReference::
+    new_space_high_promotion_mode_active_address(Isolate* isolate) {
+  return ExternalReference(
+      isolate->heap()->NewSpaceHighPromotionModeActiveAddress());
+}
+
+
ExternalReference ExternalReference::handle_scope_level_address(
Isolate* isolate) {
return ExternalReference(HandleScope::current_level_address(isolate));
Isolate* isolate);
static ExternalReference old_data_space_allocation_limit_address(
Isolate* isolate);
+ static ExternalReference new_space_high_promotion_mode_active_address(
+ Isolate* isolate);
static ExternalReference double_fp_operation(Token::Value operation,
Isolate* isolate);
MaybeObject* Heap::NumberToString(Object* number,
- bool check_number_string_cache) {
+ bool check_number_string_cache,
+ PretenureFlag pretenure) {
isolate_->counters()->number_to_string_runtime()->Increment();
if (check_number_string_cache) {
Object* cached = GetNumberStringCache(number);
}
Object* js_string;
- MaybeObject* maybe_js_string = AllocateStringFromOneByte(CStrVector(str));
+ MaybeObject* maybe_js_string =
+ AllocateStringFromOneByte(CStrVector(str), pretenure);
if (maybe_js_string->ToObject(&js_string)) {
SetNumberStringCache(number, String::cast(js_string));
}
return new_space_high_promotion_mode_active_;
}
+  // Pretenuring decision derived from the promotion-mode flag: when promotion
+  // rates are high, allocate long-lived-looking objects directly in old space.
+  inline PretenureFlag GetPretenureMode() {
+    return new_space_high_promotion_mode_active_ ? TENURED : NOT_TENURED;
+  }
+
+  // Raw address of the flag, exposed so generated code can read it via an
+  // ExternalReference (the field is intptr_t-sized for that reason).
+  inline Address* NewSpaceHighPromotionModeActiveAddress() {
+    return reinterpret_cast<Address*>(&new_space_high_promotion_mode_active_);
+  }
+
inline intptr_t PromotedTotalSize() {
return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
}
static bool RootCanBeWrittenAfterInitialization(RootListIndex root_index);
MUST_USE_RESULT MaybeObject* NumberToString(
- Object* number, bool check_number_string_cache = true);
+ Object* number, bool check_number_string_cache = true,
+ PretenureFlag pretenure = NOT_TENURED);
MUST_USE_RESULT MaybeObject* Uint32ToString(
uint32_t value, bool check_number_string_cache = true);
// Indicates that the new space should be kept small due to high promotion
// rates caused by the mutator allocating a lot of long-lived objects.
- bool new_space_high_promotion_mode_active_;
+ // TODO(hpayer): change to bool if no longer accessed from generated code
+ intptr_t new_space_high_promotion_mode_active_;
// Limit that triggers a global GC on the next (normally caused) GC. This
// is checked when we have already decided to do a GC to help determine
__ mov(FieldOperand(ecx, ConsString::kLengthOffset), ebx);
__ mov(FieldOperand(ecx, ConsString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+ __ j(zero, &skip_write_barrier);
+
+ __ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
+ __ RecordWriteField(ecx,
+ ConsString::kFirstOffset,
+ eax,
+ ebx,
+ kDontSaveFPRegs);
+ __ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+ __ RecordWriteField(ecx,
+ ConsString::kSecondOffset,
+ edx,
+ ebx,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
__ mov(FieldOperand(ecx, ConsString::kFirstOffset), eax);
__ mov(FieldOperand(ecx, ConsString::kSecondOffset), edx);
+
+ __ bind(&after_writing);
+
__ mov(eax, ecx);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
{ REG(edx), REG(eax), REG(edi), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(ebx), REG(eax), REG(ecx), EMIT_REMEMBERED_SET},
- // FastNewClosureStub
+ // FastNewClosureStub and StringAddStub::Generate
{ REG(ecx), REG(edx), REG(ebx), EMIT_REMEMBERED_SET},
+ // StringAddStub::Generate
+ { REG(ecx), REG(eax), REG(ebx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
Register scratch1,
Register scratch2,
Label* gc_required) {
- // Allocate heap number in new space.
- Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
- TAG_OBJECT);
+ Label allocate_new_space, install_map;
+ AllocationFlags flags = TAG_OBJECT;
+
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(isolate());
+
+ test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
+ j(zero, &allocate_new_space);
+
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+ jmp(&install_map);
+
+ bind(&allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ flags);
+ bind(&install_map);
// Set the map. The other fields are left uninitialized.
mov(FieldOperand(result, HeapObject::kMapOffset),
Immediate(isolate()->factory()->cons_ascii_string_map()));
constant_properties,
&is_result_from_cache);
- Handle<JSObject> boilerplate = isolate->factory()->NewJSObjectFromMap(map);
+ Handle<JSObject> boilerplate =
+ isolate->factory()->NewJSObjectFromMap(
+ map, isolate->heap()->GetPretenureMode());
// Normalize the elements of the boilerplate to save space if needed.
if (!should_have_fast_elements) JSObject::NormalizeElements(boilerplate);
// Create the JSArray.
Handle<JSFunction> constructor(
JSFunction::NativeContextFromLiterals(*literals)->array_function());
- Handle<JSArray> object =
- Handle<JSArray>::cast(isolate->factory()->NewJSObject(constructor));
+
+ Handle<JSArray> object = Handle<JSArray>::cast(
+ isolate->factory()->NewJSObject(
+ constructor, isolate->heap()->GetPretenureMode()));
ElementsKind constant_elements_kind =
static_cast<ElementsKind>(Smi::cast(elements->get(0))->value());
Object* number = args[0];
RUNTIME_ASSERT(number->IsNumber());
- return isolate->heap()->NumberToString(number, false);
+ return isolate->heap()->NumberToString(
+ number, false, isolate->heap()->GetPretenureMode());
}
UNCLASSIFIED,
58,
"Runtime::AllocateInOldPointerSpace");
+  // Entry 59: address of the new-space high-promotion-mode flag.  The
+  // description must name the referenced address, not the (unrelated)
+  // allocation limit it was copy-pasted from.
+  Add(ExternalReference::new_space_high_promotion_mode_active_address(isolate).
+          address(),
+      UNCLASSIFIED,
+      59,
+      "Heap::NewSpaceHighPromotionModeActiveAddress");
// Add a small set of deopt entry addresses to encoder without generating the
// deopt table code, which isn't possible at deserialization time.
entry,
Deoptimizer::LAZY,
Deoptimizer::CALCULATE_ENTRY_ADDRESS);
- Add(address, LAZY_DEOPTIMIZATION, 59 + entry, "lazy_deopt");
+ Add(address, LAZY_DEOPTIMIZATION, 60 + entry, "lazy_deopt");
}
}
__ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
__ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
Immediate(String::kEmptyHashField));
+
+ Label skip_write_barrier, after_writing;
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(masm->isolate());
+ __ Load(rbx, high_promotion_mode);
+ __ testb(rbx, Immediate(1));
+ __ j(zero, &skip_write_barrier);
+
+ __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
+ __ RecordWriteField(rcx,
+ ConsString::kFirstOffset,
+ rax,
+ rbx,
+ kDontSaveFPRegs);
+ __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+ __ RecordWriteField(rcx,
+ ConsString::kSecondOffset,
+ rdx,
+ rbx,
+ kDontSaveFPRegs);
+ __ jmp(&after_writing);
+
+ __ bind(&skip_write_barrier);
__ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
__ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
+
+ __ bind(&after_writing);
+
__ movq(rax, rcx);
__ IncrementCounter(counters->string_add_native(), 1);
__ ret(2 * kPointerSize);
{ REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET},
// StoreArrayLiteralElementStub::Generate
{ REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET},
- // FastNewClosureStub::Generate
+ // FastNewClosureStub::Generate and
+ // StringAddStub::Generate
{ REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET},
+ // StringAddStub::Generate
+ { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET},
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
Register scratch1,
Register scratch2,
Label* gc_required) {
- // Allocate heap number in new space.
- Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
- TAG_OBJECT);
+ Label allocate_new_space, install_map;
+ AllocationFlags flags = TAG_OBJECT;
+
+ ExternalReference high_promotion_mode = ExternalReference::
+ new_space_high_promotion_mode_active_address(isolate());
+
+ Load(scratch1, high_promotion_mode);
+ testb(scratch1, Immediate(1));
+ j(zero, &allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
+
+ jmp(&install_map);
+
+ bind(&allocate_new_space);
+ Allocate(ConsString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ flags);
+
+ bind(&install_map);
// Set the map. The other fields are left uninitialized.
LoadRoot(kScratchRegister, Heap::kConsAsciiStringMapRootIndex);