MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
if (!maybe_obj->To(&new_elms)) return maybe_obj;
- if (len > 0) {
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, 0, len, elms_obj);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
- FillWithHoles(heap, new_elms, new_length, capacity);
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, 0,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
elms = new_elms;
}
heap->AllocateUninitializedFixedDoubleArray(capacity);
if (!maybe_obj->To(&new_elms)) return maybe_obj;
- if (len > 0) {
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, 0, len, elms_obj);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
-
- FillWithHoles(new_elms, len + to_add, new_elms->length());
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, 0,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
} else {
// to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
// empty_fixed_array.
MaybeObject* maybe_elms = heap->AllocateUninitializedFixedArray(capacity);
if (!maybe_elms->To(&new_elms)) return maybe_elms;
- if (len > 0) {
- ElementsKind kind = array->GetElementsKind();
- ElementsAccessor* accessor = array->GetElementsAccessor();
- MaybeObject* maybe_failure =
- accessor->CopyElements(NULL, 0, new_elms, kind, to_add, len, elms);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
+ ElementsKind kind = array->GetElementsKind();
+ ElementsAccessor* accessor = array->GetElementsAccessor();
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, 0, new_elms, kind, to_add,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
- FillWithHoles(heap, new_elms, new_length, capacity);
elms = new_elms;
array->set_elements(elms);
} else {
ASSERT(!maybe_failure->IsFailure());
USE(maybe_failure);
}
- const int to_copy = len - actual_delete_count - actual_start;
- if (to_copy > 0) {
- MaybeObject* maybe_failure = accessor->CopyElements(
- NULL, actual_start + actual_delete_count, new_elms, kind,
- actual_start + item_count, to_copy, elms);
- ASSERT(!maybe_failure->IsFailure());
- USE(maybe_failure);
- }
-
- FillWithHoles(heap, new_elms, new_length, capacity);
+ MaybeObject* maybe_failure = accessor->CopyElements(
+ NULL, actual_start + actual_delete_count, new_elms, kind,
+ actual_start + item_count,
+ ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
+ ASSERT(!maybe_failure->IsFailure());
+ USE(maybe_failure);
elms_obj = new_elms;
elms_changed = true;
uint32_t to_start,
int raw_copy_size) {
ASSERT(to->map() != HEAP->fixed_cow_array_map());
+ AssertNoAllocation no_allocation;
int copy_size = raw_copy_size;
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = Min(from->length() - from_start,
to->length() - to_start);
-#ifdef DEBUG
- // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ int start = to_start + copy_size;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
ElementsKind to_kind,
uint32_t to_start,
int raw_copy_size) {
+ AssertNoAllocation no_allocation;
int copy_size = raw_copy_size;
Heap* heap = from->GetHeap();
if (raw_copy_size < 0) {
ASSERT(raw_copy_size == ElementsAccessor::kCopyToEnd ||
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = from->max_number_key() + 1 - from_start;
-#ifdef DEBUG
- // Fast object arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ int start = to_start + copy_size;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT(to != from);
ASSERT(IsFastSmiOrObjectElementsKind(to_kind));
raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole);
copy_size = Min(from->length() - from_start,
to->length() - to_start);
-#ifdef DEBUG
- // FAST_*_ELEMENTS arrays cannot be uninitialized. Ensure they are already
- // marked with the hole.
if (raw_copy_size == ElementsAccessor::kCopyToEndAndInitializeToHole) {
- for (int i = to_start + copy_size; i < to->length(); ++i) {
- ASSERT(to->get(i)->IsTheHole());
+ // Also initialize the area that will be copied over since HeapNumber
+ // allocation below can cause an incremental marking step, requiring all
+ // existing heap objects to be properly initialized.
+ int start = to_start;
+ int length = to->length() - start;
+ if (length > 0) {
+ Heap* heap = from->GetHeap();
+ MemsetPointer(to->data_start() + start, heap->the_hole_value(), length);
}
}
-#endif
}
ASSERT((copy_size + static_cast<int>(to_start)) <= to->length() &&
(copy_size + static_cast<int>(from_start)) <= from->length());
// Allocate a new fast elements backing store.
FixedArray* new_elements;
- { MaybeObject* maybe = heap->AllocateFixedArrayWithHoles(capacity);
- if (!maybe->To(&new_elements)) return maybe;
- }
+ MaybeObject* maybe = heap->AllocateUninitializedFixedArray(capacity);
+ if (!maybe->To(&new_elements)) return maybe;
ElementsKind elements_kind = GetElementsKind();
ElementsKind new_elements_kind;
}
FixedArrayBase* old_elements = elements();
ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
- { MaybeObject* maybe_obj =
- accessor->CopyElements(this, new_elements, new_elements_kind);
- if (maybe_obj->IsFailure()) return maybe_obj;
- }
+ MaybeObject* maybe_obj =
+ accessor->CopyElements(this, new_elements, new_elements_kind);
+ if (maybe_obj->IsFailure()) return maybe_obj;
+
if (elements_kind != NON_STRICT_ARGUMENTS_ELEMENTS) {
Map* new_map = map();
if (new_elements_kind != elements_kind) {