1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "arguments.h"
32 #include "bootstrapper.h"
34 #include "cpu-profiler.h"
37 #include "heap-profiler.h"
38 #include "mark-compact.h"
39 #include "stub-cache.h"
40 #include "vm-state-inl.h"
47 // Arguments object passed to C++ builtins.
// Thin template wrapper over Arguments that adds bounds-checked access
// and, depending on the |extra_args| template parameter, knowledge of an
// implicit trailing argument (the called JSFunction).
48 template <BuiltinExtraArguments extra_args>
49 class BuiltinArguments : public Arguments {
51 BuiltinArguments(int length, Object** arguments)
52 : Arguments(length, arguments) { }
// Bounds-checked raw slot access; the receiver occupies slot 0.
54 Object*& operator[] (int index) {
55 ASSERT(index < length());
56 return Arguments::operator[](index);
// Bounds-checked, typed handle access to the argument at |index|.
59 template <class S> Handle<S> at(int index) {
60 ASSERT(index < length());
61 return Arguments::at<S>(index);
// The receiver is always the first (index 0) entry.
64 Handle<Object> receiver() {
65 return Arguments::at<Object>(0);
// Only meaningful when extra_args == NEEDS_CALLED_FUNCTION: the called
// function is passed as the last (hidden) argument slot.
68 Handle<JSFunction> called_function() {
69 STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
70 return Arguments::at<JSFunction>(Arguments::length() - 1);
73 // Gets the total number of arguments including the receiver (but
74 // excluding extra arguments).
76 STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
77 return Arguments::length();
82 // Check we have at least the receiver.
83 ASSERT(Arguments::length() >= 1);
89 // Specialize BuiltinArguments for the called function extra argument.
// The hidden called-function slot is excluded from the visible length.
92 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
93 return Arguments::length() - 1;
// Debug-mode sanity check: with NEEDS_CALLED_FUNCTION there must be at
// least two slots (receiver + called function).
98 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
99 // Check we have at least the receiver and the called function.
100 ASSERT(Arguments::length() >= 2);
101 // Make sure cast to JSFunction succeeds.
// Generate one <Name>ArgumentsType typedef per C++ builtin, so each
// builtin body receives a BuiltinArguments instantiated with the right
// BuiltinExtraArguments spec from BUILTIN_LIST_C.
107 #define DEF_ARG_TYPE(name, spec) \
108 typedef BuiltinArguments<spec> name##ArgumentsType;
109 BUILTIN_LIST_C(DEF_ARG_TYPE)
114 // ----------------------------------------------------------------------------
115 // Support macro for defining builtins in C++.
116 // ----------------------------------------------------------------------------
118 // A builtin function is defined by writing:
124 // In the body of the builtin function the arguments can be accessed
125 // through the BuiltinArguments object args.
// Debug variant: an extra trampoline wraps the typed arguments object
// (so Verify-style checks can run) before calling Builtin_Impl_<name>.
129 #define BUILTIN(name) \
130 MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
131 name##ArgumentsType args, Isolate* isolate); \
132 MUST_USE_RESULT static MaybeObject* Builtin_##name( \
133 int args_length, Object** args_object, Isolate* isolate) { \
134 name##ArgumentsType args(args_length, args_object); \
136 return Builtin_Impl_##name(args, isolate); \
138 MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
139 name##ArgumentsType args, Isolate* isolate)
141 #else // For release mode.
// Release variant: same trampoline shape but without MUST_USE_RESULT on
// the implementation and without the debug verification step.
143 #define BUILTIN(name) \
144 static MaybeObject* Builtin_impl##name( \
145 name##ArgumentsType args, Isolate* isolate); \
146 static MaybeObject* Builtin_##name( \
147 int args_length, Object** args_object, Isolate* isolate) { \
148 name##ArgumentsType args(args_length, args_object); \
149 return Builtin_impl##name(args, isolate); \
151 static MaybeObject* Builtin_impl##name( \
152 name##ArgumentsType args, Isolate* isolate)
// Fast check whether the current builtin was invoked as a constructor:
// read the frame marker of the caller of the topmost exit frame and
// compare it against the CONSTRUCT marker.  A slower computation using a
// full stack-frame iterator is kept as a reference result and
// ASSERT_EQ-checked against the fast answer in debug builds.
157 static inline bool CalledAsConstructor(Isolate* isolate) {
158 // Calculate the result using a full stack frame iterator and check
159 // that the state of the stack is as we assume it to be in the
161 StackFrameIterator it(isolate);
162 ASSERT(it.frame()->is_exit());
164 StackFrame* frame = it.frame();
165 bool reference_result = frame->is_construct();
166 Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
167 // Because we know fp points to an exit frame we can use the relevant
168 // part of ExitFrame::ComputeCallerState directly.
169 const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
170 Address caller_fp = Memory::Address_at(fp + kCallerOffset);
171 // This inlines the part of StackFrame::ComputeType that grabs the
172 // type of the current frame. Note that StackFrame::ComputeType
173 // has been specialized for each architecture so if any one of them
174 // changes this code has to be changed as well.
175 const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
176 const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
177 Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
178 bool result = (marker == kConstructMarker);
179 ASSERT_EQ(result, reference_result);
185 // ----------------------------------------------------------------------------
189 return isolate->heap()->undefined_value(); // Make compiler happy.
// The body of the builtins.js-visible "empty function": does nothing and
// returns undefined.
193 BUILTIN(EmptyFunction) {
194 return isolate->heap()->undefined_value();
// memmove-style copy of |len| doubles between (possibly overlapping)
// regions of FixedDoubleArray backing stores.  No-op for len == 0.
198 static void MoveDoubleElements(FixedDoubleArray* dst,
200 FixedDoubleArray* src,
203 if (len == 0) return;
204 OS::MemMove(dst->data_start() + dst_index,
205 src->data_start() + src_index,
// Fills dst[from..to) with the one-pointer hole sentinel.  Must not be
// used on copy-on-write arrays (asserted).
210 static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
211 ASSERT(dst->map() != heap->fixed_cow_array_map());
212 MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
// Double-array overload: writes the hole NaN pattern element by element.
216 static void FillWithHoles(FixedDoubleArray* dst, int from, int to) {
217 for (int i = from; i < to; i++) {
218 dst->set_the_hole(i);
// In-place "left trim": logically removes the first |to_trim| elements of
// |elms| by rewriting the map and length words |to_trim| entries further
// into the object and placing a filler object over the vacated prefix.
// Returns the FixedArrayBase at the shifted address.  Works for both
// FixedArray (pointer-sized entries) and FixedDoubleArray (double-sized).
223 static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
224 FixedArrayBase* elms,
226 Map* map = elms->map();
228 if (elms->IsFixedArray()) {
229 entry_size = kPointerSize;
231 entry_size = kDoubleSize;
233 ASSERT(elms->map() != heap->fixed_cow_array_map());
234 // For now this trick is only applied to fixed arrays in new and paged space.
235 // In large object space the object's start must coincide with chunk
236 // and thus the trick is just not applicable.
237 ASSERT(!heap->lo_space()->Contains(elms));
// The layout assumptions below (map word at offset 0, length word next)
// are what make the "move the header forward" trick possible.
239 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
240 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
241 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
243 Object** former_start = HeapObject::RawField(elms, 0);
245 const int len = elms->length();
247 if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
248 elms->IsFixedArray() &&
249 !heap->new_space()->Contains(elms)) {
250 // If we are doing a big trim in old space then we zap the space that was
251 // formerly part of the array so that the GC (aided by the card-based
252 // remembered set) won't find pointers to new-space there.
253 Object** zap = reinterpret_cast<Object**>(elms->address());
254 zap++; // Header of filler must be at least one word so skip that.
255 for (int i = 1; i < to_trim; i++) {
256 *zap++ = Smi::FromInt(0);
259 // Technically in new space this write might be omitted (except for
260 // debug mode which iterates through the heap), but to play safer
262 heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);
// Write the new map and the shortened length at the new start position.
264 int new_start_index = to_trim * (entry_size / kPointerSize);
265 former_start[new_start_index] = map;
266 former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);
268 // Maintain marking consistency for HeapObjectIterator and
269 // IncrementalMarking.
270 int size_delta = to_trim * entry_size;
271 if (heap->marking()->TransferMark(elms->address(),
272 elms->address() + size_delta)) {
273 MemoryChunk::IncrementLiveBytesFromMutator(elms->address(), -size_delta);
276 FixedArrayBase* new_elms = FixedArrayBase::cast(HeapObject::FromAddress(
277 elms->address() + size_delta));
// Tell the heap profiler the object "moved" so snapshots stay coherent.
278 HeapProfiler* profiler = heap->isolate()->heap_profiler();
279 if (profiler->is_tracking_object_moves()) {
280 profiler->ObjectMoveEvent(elms->address(),
// Returns true iff Array.prototype and Object.prototype are both
// element-free, i.e. prototype chain lookups cannot shadow missing
// (hole) elements.  This is what permits the fast C++ paths below.
288 static bool ArrayPrototypeHasNoElements(Heap* heap,
289 Context* native_context,
290 JSObject* array_proto) {
291 // This method depends on non-writability of Object and Array prototype
293 if (array_proto->elements() != heap->empty_fixed_array()) return false;
// Walk up: Array.prototype -> Object.prototype -> null.
295 Object* proto = array_proto->GetPrototype();
296 if (proto == heap->null_value()) return false;
297 array_proto = JSObject::cast(proto);
298 if (array_proto != native_context->initial_object_prototype()) return false;
299 if (array_proto->elements() != heap->empty_fixed_array()) return false;
300 return array_proto->GetPrototype()->IsNull();
// Prepares |receiver| for a fast-path array operation.  Returns:
//   * the (possibly new) writable elements backing store on success,
//   * NULL to signal "bail out to the JS builtin" (non-array, observed,
//     non-extensible, or unsupported elements kind), or
//   * a Failure if an allocation performed here failed.
// When |args| is non-NULL, the elements kind is additionally transitioned
// so that the arguments starting at |first_added_arg| can be stored.
305 static inline MaybeObject* EnsureJSArrayWithWritableFastElements(
306 Heap* heap, Object* receiver, Arguments* args, int first_added_arg) {
307 if (!receiver->IsJSArray()) return NULL;
308 JSArray* array = JSArray::cast(receiver);
// Observed arrays must take the JS path so change records are emitted.
309 if (array->map()->is_observed()) return NULL;
310 if (!array->map()->is_extensible()) return NULL;
311 HeapObject* elms = array->elements();
312 Map* map = elms->map();
313 if (map == heap->fixed_array_map()) {
314 if (args == NULL || array->HasFastObjectElements()) return elms;
315 } else if (map == heap->fixed_cow_array_map()) {
// Copy-on-write backing store: make a private writable copy first.
316 MaybeObject* maybe_writable_result = array->EnsureWritableFastElements();
317 if (args == NULL || array->HasFastObjectElements() ||
318 !maybe_writable_result->To(&elms)) {
319 return maybe_writable_result;
321 } else if (map == heap->fixed_double_array_map()) {
322 if (args == NULL) return elms;
327 // Need to ensure that the arguments passed in args can be contained in
329 int args_length = args->length();
330 if (first_added_arg >= args_length) return array->elements();
332 ElementsKind origin_kind = array->map()->elements_kind();
333 ASSERT(!IsFastObjectElementsKind(origin_kind));
334 ElementsKind target_kind = origin_kind;
// Arguments are stored in reverse order on the stack; compute the base
// pointer so arguments[i] walks the values to be added.
335 int arg_count = args->length() - first_added_arg;
336 Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
337 for (int i = 0; i < arg_count; i++) {
338 Object* arg = arguments[i];
339 if (arg->IsHeapObject()) {
340 if (arg->IsHeapNumber()) {
341 target_kind = FAST_DOUBLE_ELEMENTS;
343 target_kind = FAST_ELEMENTS;
// Widen the elements kind if any argument does not fit the current one.
348 if (target_kind != origin_kind) {
349 MaybeObject* maybe_failure = array->TransitionElementsKind(target_kind);
350 if (maybe_failure->IsFailure()) return maybe_failure;
351 return array->elements();
// True when it is safe to move/trim elements in C++: the receiver's
// prototype must be the unmodified Array.prototype with an element-free
// chain, and the clever-optimizations flag must be on.
357 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
359 if (!FLAG_clever_optimizations) return false;
360 Context* native_context = heap->isolate()->context()->native_context();
361 JSObject* array_proto =
362 JSObject::cast(native_context->array_function()->prototype());
363 return receiver->GetPrototype() == array_proto &&
364 ArrayPrototypeHasNoElements(heap, native_context, array_proto);
// Slow-path fallback: looks up the JS implementation of the builtin by
// |name| on the native context's builtins object and calls it with the
// same receiver and arguments.  Propagates a pending exception as a
// Failure::Exception.
368 MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
371 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
372 HandleScope handleScope(isolate);
374 Handle<Object> js_builtin =
375 GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
377 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
// Re-pack the arguments (minus the receiver) into a handle vector.
378 int argc = args.length() - 1;
379 ScopedVector<Handle<Object> > argv(argc);
380 for (int i = 0; i < argc; ++i) {
381 argv[i] = args.at<Object>(i + 1);
383 bool pending_exception;
384 Handle<Object> result = Execution::Call(isolate,
390 if (pending_exception) return Failure::Exception();
// Array.prototype.push fast path.  Falls back to the JS "ArrayPush"
// builtin whenever the receiver is not a plain fast-elements JSArray.
396 Heap* heap = isolate->heap();
397 Object* receiver = *args.receiver();
398 FixedArrayBase* elms_obj;
399 MaybeObject* maybe_elms_obj =
400 EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 1);
401 if (maybe_elms_obj == NULL) {
402 return CallJsBuiltin(isolate, "ArrayPush", args);
404 if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
406 JSArray* array = JSArray::cast(receiver);
407 ASSERT(!array->map()->is_observed());
409 ElementsKind kind = array->GetElementsKind();
// Path 1: smi/object elements backed by a FixedArray.
411 if (IsFastSmiOrObjectElementsKind(kind)) {
412 FixedArray* elms = FixedArray::cast(elms_obj);
414 int len = Smi::cast(array->length())->value();
415 int to_add = args.length() - 1;
417 return Smi::FromInt(len);
419 // Currently fixed arrays cannot grow too big, so
420 // we should never hit this case.
421 ASSERT(to_add <= (Smi::kMaxValue - len));
423 int new_length = len + to_add;
425 if (new_length > elms->length()) {
426 // New backing storage is needed.
// Growth policy: 1.5x the needed length plus a constant slack.
427 int capacity = new_length + (new_length >> 1) + 16;
428 FixedArray* new_elms;
429 MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
430 if (!maybe_obj->To(&new_elms)) return maybe_obj;
432 ElementsAccessor* accessor = array->GetElementsAccessor();
433 MaybeObject* maybe_failure = accessor->CopyElements(
434 NULL, 0, kind, new_elms, 0,
435 ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
// Same-kind copy into fresh storage cannot fail.
436 ASSERT(!maybe_failure->IsFailure());
442 // Add the provided values.
443 DisallowHeapAllocation no_gc;
444 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
445 for (int index = 0; index < to_add; index++) {
446 elms->set(index + len, args[index + 1], mode);
449 if (elms != array->elements()) {
450 array->set_elements(elms);
454 array->set_length(Smi::FromInt(new_length));
455 return Smi::FromInt(new_length);
// Path 2: double elements backed by a FixedDoubleArray.
457 int len = Smi::cast(array->length())->value();
458 int elms_len = elms_obj->length();
460 int to_add = args.length() - 1;
462 return Smi::FromInt(len);
464 // Currently fixed arrays cannot grow too big, so
465 // we should never hit this case.
466 ASSERT(to_add <= (Smi::kMaxValue - len));
468 int new_length = len + to_add;
470 FixedDoubleArray* new_elms;
472 if (new_length > elms_len) {
473 // New backing storage is needed.
474 int capacity = new_length + (new_length >> 1) + 16;
475 MaybeObject* maybe_obj =
476 heap->AllocateUninitializedFixedDoubleArray(capacity);
477 if (!maybe_obj->To(&new_elms)) return maybe_obj;
479 ElementsAccessor* accessor = array->GetElementsAccessor();
480 MaybeObject* maybe_failure = accessor->CopyElements(
481 NULL, 0, kind, new_elms, 0,
482 ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
483 ASSERT(!maybe_failure->IsFailure());
486 // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
487 // empty_fixed_array.
488 new_elms = FixedDoubleArray::cast(elms_obj);
491 // Add the provided values.
492 DisallowHeapAllocation no_gc;
494 for (index = 0; index < to_add; index++) {
495 Object* arg = args[index + 1];
// Arguments were vetted above; unbox Smi/HeapNumber to a raw double.
496 new_elms->set(index + len, arg->Number());
499 if (new_elms != array->elements()) {
500 array->set_elements(new_elms);
504 array->set_length(Smi::FromInt(new_length));
505 return Smi::FromInt(new_length);
// Array.prototype.pop fast path: read the last element (falling back to
// the prototype chain if it is a hole) and shorten the array by one.
511 Heap* heap = isolate->heap();
512 Object* receiver = *args.receiver();
513 FixedArrayBase* elms_obj;
514 MaybeObject* maybe_elms =
515 EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
516 if (maybe_elms == NULL) return CallJsBuiltin(isolate, "ArrayPop", args);
517 if (!maybe_elms->To(&elms_obj)) return maybe_elms;
519 JSArray* array = JSArray::cast(receiver);
520 ASSERT(!array->map()->is_observed());
522 int len = Smi::cast(array->length())->value();
// Popping from an empty array yields undefined.
523 if (len == 0) return heap->undefined_value();
525 ElementsAccessor* accessor = array->GetElementsAccessor();
526 int new_length = len - 1;
527 MaybeObject* maybe_result;
528 if (accessor->HasElement(array, array, new_length, elms_obj)) {
529 maybe_result = accessor->Get(array, array, new_length, elms_obj);
// A hole at the last index: the value may live on the prototype chain.
531 maybe_result = array->GetPrototype()->GetElement(isolate, len - 1);
533 if (maybe_result->IsFailure()) return maybe_result;
534 MaybeObject* maybe_failure =
535 accessor->SetLength(array, Smi::FromInt(new_length));
536 if (maybe_failure->IsFailure()) return maybe_failure;
// Array.prototype.shift fast path: remove and return the first element,
// then either left-trim the backing store in place or memmove the
// remaining elements down by one.
541 BUILTIN(ArrayShift) {
542 Heap* heap = isolate->heap();
543 Object* receiver = *args.receiver();
544 FixedArrayBase* elms_obj;
545 MaybeObject* maybe_elms_obj =
546 EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
547 if (maybe_elms_obj == NULL)
548 return CallJsBuiltin(isolate, "ArrayShift", args);
549 if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
551 if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
552 return CallJsBuiltin(isolate, "ArrayShift", args);
554 JSArray* array = JSArray::cast(receiver);
555 ASSERT(!array->map()->is_observed());
557 int len = Smi::cast(array->length())->value();
558 if (len == 0) return heap->undefined_value();
561 ElementsAccessor* accessor = array->GetElementsAccessor();
563 MaybeObject* maybe_first = accessor->Get(receiver, array, 0, elms_obj);
564 if (!maybe_first->To(&first)) return maybe_first;
// A hole shifts out as undefined (prototype chain is known element-free).
565 if (first->IsTheHole()) {
566 first = heap->undefined_value();
// LeftTrimFixedArray does not support large-object space (see its
// asserts), so only trim when the store is outside LO space.
569 if (!heap->lo_space()->Contains(elms_obj)) {
570 array->set_elements(LeftTrimFixedArray(heap, elms_obj, 1));
572 // Shift the elements.
573 if (elms_obj->IsFixedArray()) {
574 FixedArray* elms = FixedArray::cast(elms_obj);
575 DisallowHeapAllocation no_gc;
576 heap->MoveElements(elms, 0, 1, len - 1);
577 elms->set(len - 1, heap->the_hole_value());
579 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
580 MoveDoubleElements(elms, 0, elms, 1, len - 1);
581 elms->set_the_hole(len - 1);
586 array->set_length(Smi::FromInt(len - 1));
// Array.prototype.unshift fast path: prepend the arguments, growing or
// shifting the FixedArray backing store as needed.  Only smi/object
// elements are handled here; everything else goes to the JS builtin.
592 BUILTIN(ArrayUnshift) {
593 Heap* heap = isolate->heap();
594 Object* receiver = *args.receiver();
595 FixedArrayBase* elms_obj;
596 MaybeObject* maybe_elms_obj =
597 EnsureJSArrayWithWritableFastElements(heap, receiver, NULL, 0);
598 if (maybe_elms_obj == NULL)
599 return CallJsBuiltin(isolate, "ArrayUnshift", args);
600 if (!maybe_elms_obj->To(&elms_obj)) return maybe_elms_obj;
602 if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
603 return CallJsBuiltin(isolate, "ArrayUnshift", args);
605 JSArray* array = JSArray::cast(receiver);
606 ASSERT(!array->map()->is_observed());
607 if (!array->HasFastSmiOrObjectElements()) {
608 return CallJsBuiltin(isolate, "ArrayUnshift", args);
610 FixedArray* elms = FixedArray::cast(elms_obj);
612 int len = Smi::cast(array->length())->value();
613 int to_add = args.length() - 1;
614 int new_length = len + to_add;
615 // Currently fixed arrays cannot grow too big, so
616 // we should never hit this case.
617 ASSERT(to_add <= (Smi::kMaxValue - len));
// May transition the elements kind so the new values fit (no doubles).
619 MaybeObject* maybe_object =
620 array->EnsureCanContainElements(&args, 1, to_add,
621 DONT_ALLOW_DOUBLE_ELEMENTS);
622 if (maybe_object->IsFailure()) return maybe_object;
624 if (new_length > elms->length()) {
625 // New backing storage is needed.
626 int capacity = new_length + (new_length >> 1) + 16;
627 FixedArray* new_elms;
628 MaybeObject* maybe_elms = heap->AllocateUninitializedFixedArray(capacity);
629 if (!maybe_elms->To(&new_elms)) return maybe_elms;
// Copy the existing elements shifted right by |to_add| slots.
631 ElementsKind kind = array->GetElementsKind();
632 ElementsAccessor* accessor = array->GetElementsAccessor();
633 MaybeObject* maybe_failure = accessor->CopyElements(
634 NULL, 0, kind, new_elms, to_add,
635 ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
636 ASSERT(!maybe_failure->IsFailure());
640 array->set_elements(elms);
// Enough capacity already: shift the existing elements in place.
642 DisallowHeapAllocation no_gc;
643 heap->MoveElements(elms, to_add, 0, len);
646 // Add the provided values.
647 DisallowHeapAllocation no_gc;
648 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
649 for (int i = 0; i < to_add; i++) {
650 elms->set(i, args[i + 1], mode);
654 array->set_length(Smi::FromInt(new_length));
655 return Smi::FromInt(new_length);
// Array.prototype.slice fast path.  Also handles the common
// Array.prototype.slice.call(arguments, ...) idiom.  Any receiver,
// argument, or elements-kind surprise bails out to the JS builtin.
659 BUILTIN(ArraySlice) {
660 Heap* heap = isolate->heap();
661 Object* receiver = *args.receiver();
662 FixedArrayBase* elms;
664 if (receiver->IsJSArray()) {
665 JSArray* array = JSArray::cast(receiver);
666 if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
667 return CallJsBuiltin(isolate, "ArraySlice", args);
670 if (array->HasFastElements()) {
671 elms = array->elements();
673 return CallJsBuiltin(isolate, "ArraySlice", args);
676 len = Smi::cast(array->length())->value();
678 // Array.slice(arguments, ...) is quite a common idiom (notably more
679 // than 50% of invocations in Web apps). Treat it in C++ as well.
// Arguments objects are recognized by their boilerplate map.
681 isolate->context()->native_context()->arguments_boilerplate()->map();
683 bool is_arguments_object_with_fast_elements =
684 receiver->IsJSObject() &&
685 JSObject::cast(receiver)->map() == arguments_map;
686 if (!is_arguments_object_with_fast_elements) {
687 return CallJsBuiltin(isolate, "ArraySlice", args);
689 JSObject* object = JSObject::cast(receiver);
691 if (object->HasFastElements()) {
692 elms = object->elements();
694 return CallJsBuiltin(isolate, "ArraySlice", args);
// The arguments object stores its length as an in-object property.
696 Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
697 if (!len_obj->IsSmi()) {
698 return CallJsBuiltin(isolate, "ArraySlice", args);
700 len = Smi::cast(len_obj)->value();
701 if (len > elms->length()) {
702 return CallJsBuiltin(isolate, "ArraySlice", args);
706 JSObject* object = JSObject::cast(receiver);
709 int n_arguments = args.length() - 1;
711 // Note carefully chosen defaults---if argument is missing,
712 // it's undefined which gets converted to 0 for relative_start
713 // and to len for relative_end.
714 int relative_start = 0;
715 int relative_end = len;
716 if (n_arguments > 0) {
717 Object* arg1 = args[1];
719 relative_start = Smi::cast(arg1)->value();
720 } else if (arg1->IsHeapNumber()) {
721 double start = HeapNumber::cast(arg1)->value();
// Out-of-int-range starts cannot be handled here; defer to JS.
722 if (start < kMinInt || start > kMaxInt) {
723 return CallJsBuiltin(isolate, "ArraySlice", args);
725 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
726 } else if (!arg1->IsUndefined()) {
727 return CallJsBuiltin(isolate, "ArraySlice", args);
729 if (n_arguments > 1) {
730 Object* arg2 = args[2];
732 relative_end = Smi::cast(arg2)->value();
733 } else if (arg2->IsHeapNumber()) {
734 double end = HeapNumber::cast(arg2)->value();
735 if (end < kMinInt || end > kMaxInt) {
736 return CallJsBuiltin(isolate, "ArraySlice", args);
738 relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
739 } else if (!arg2->IsUndefined()) {
740 return CallJsBuiltin(isolate, "ArraySlice", args);
745 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
746 int k = (relative_start < 0) ? Max(len + relative_start, 0)
747 : Min(relative_start, len);
749 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
750 int final = (relative_end < 0) ? Max(len + relative_end, 0)
751 : Min(relative_end, len);
753 // Calculate the length of result array.
754 int result_len = Max(final - k, 0);
756 ElementsKind kind = object->GetElementsKind();
757 if (IsHoleyElementsKind(kind)) {
// If the sliced range has no holes the result can be packed.
759 ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
760 for (int i = k; i < final; i++) {
761 if (!accessor->HasElement(object, object, i, elms)) {
767 kind = GetPackedElementsKind(kind);
768 } else if (!receiver->IsJSArray()) {
769 return CallJsBuiltin(isolate, "ArraySlice", args);
773 JSArray* result_array;
774 MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(kind,
778 DisallowHeapAllocation no_gc;
779 if (result_len == 0) return maybe_array;
780 if (!maybe_array->To(&result_array)) return maybe_array;
782 ElementsAccessor* accessor = object->GetElementsAccessor();
783 MaybeObject* maybe_failure = accessor->CopyElements(
784 NULL, k, kind, result_array->elements(), 0, result_len, elms);
785 ASSERT(!maybe_failure->IsFailure());
// Array.prototype.splice fast path: removes actual_delete_count elements
// at actual_start, inserts item_count new ones, and returns a new array
// holding the removed elements.  Shrinking may left-trim or memmove the
// backing store; growing may allocate fresh storage.
792 BUILTIN(ArraySplice) {
793 Heap* heap = isolate->heap();
794 Object* receiver = *args.receiver();
795 FixedArrayBase* elms_obj;
796 MaybeObject* maybe_elms =
797 EnsureJSArrayWithWritableFastElements(heap, receiver, &args, 3);
798 if (maybe_elms == NULL) {
799 return CallJsBuiltin(isolate, "ArraySplice", args);
801 if (!maybe_elms->To(&elms_obj)) return maybe_elms;
803 if (!IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(receiver))) {
804 return CallJsBuiltin(isolate, "ArraySplice", args);
806 JSArray* array = JSArray::cast(receiver);
807 ASSERT(!array->map()->is_observed());
809 int len = Smi::cast(array->length())->value();
811 int n_arguments = args.length() - 1;
// Parse the start argument (Smi fast case, HeapNumber slow case).
813 int relative_start = 0;
814 if (n_arguments > 0) {
815 Object* arg1 = args[1];
817 relative_start = Smi::cast(arg1)->value();
818 } else if (arg1->IsHeapNumber()) {
819 double start = HeapNumber::cast(arg1)->value();
820 if (start < kMinInt || start > kMaxInt) {
821 return CallJsBuiltin(isolate, "ArraySplice", args);
823 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
824 } else if (!arg1->IsUndefined()) {
825 return CallJsBuiltin(isolate, "ArraySplice", args);
828 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
829 : Min(relative_start, len);
831 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
832 // given as a request to delete all the elements from the start.
833 // And it differs from the case of undefined delete count.
834 // This does not follow ECMA-262, but we do the same for
836 int actual_delete_count;
837 if (n_arguments == 1) {
838 ASSERT(len - actual_start >= 0);
839 actual_delete_count = len - actual_start;
841 int value = 0; // ToInteger(undefined) == 0
842 if (n_arguments > 1) {
843 Object* arg2 = args[2];
845 value = Smi::cast(arg2)->value();
847 return CallJsBuiltin(isolate, "ArraySplice", args);
// Clamp delete count to [0, len - actual_start].
850 actual_delete_count = Min(Max(value, 0), len - actual_start);
853 ElementsKind elements_kind = array->GetElementsKind();
855 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
856 int new_length = len - actual_delete_count + item_count;
858 // For double mode we do not support changing the length.
859 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
860 return CallJsBuiltin(isolate, "ArraySplice", args);
// Whole array removed: reuse the old backing store for the result and
// reset the receiver to empty.
863 if (new_length == 0) {
864 MaybeObject* maybe_array = heap->AllocateJSArrayWithElements(
865 elms_obj, elements_kind, actual_delete_count);
866 if (maybe_array->IsFailure()) return maybe_array;
867 array->set_elements(heap->empty_fixed_array());
868 array->set_length(Smi::FromInt(0));
872 JSArray* result_array = NULL;
873 MaybeObject* maybe_array =
874 heap->AllocateJSArrayAndStorage(elements_kind,
876 actual_delete_count);
877 if (!maybe_array->To(&result_array)) return maybe_array;
879 if (actual_delete_count > 0) {
880 DisallowHeapAllocation no_gc;
881 ElementsAccessor* accessor = array->GetElementsAccessor();
882 MaybeObject* maybe_failure = accessor->CopyElements(
883 NULL, actual_start, elements_kind, result_array->elements(),
884 0, actual_delete_count, elms_obj);
885 // Cannot fail since the origin and target array are of the same elements
887 ASSERT(!maybe_failure->IsFailure());
891 bool elms_changed = false;
// Shrinking: choose between trimming off the front (cheaper when the
// prefix to move is smaller than the suffix) and moving the tail down.
892 if (item_count < actual_delete_count) {
894 const bool trim_array = !heap->lo_space()->Contains(elms_obj) &&
895 ((actual_start + item_count) <
896 (len - actual_delete_count - actual_start));
898 const int delta = actual_delete_count - item_count;
900 if (elms_obj->IsFixedDoubleArray()) {
901 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
902 MoveDoubleElements(elms, delta, elms, 0, actual_start);
904 FixedArray* elms = FixedArray::cast(elms_obj);
905 DisallowHeapAllocation no_gc;
906 heap->MoveElements(elms, delta, 0, actual_start);
909 elms_obj = LeftTrimFixedArray(heap, elms_obj, delta);
// Non-trim path: slide the tail down and hole out the vacated suffix.
913 if (elms_obj->IsFixedDoubleArray()) {
914 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
915 MoveDoubleElements(elms, actual_start + item_count,
916 elms, actual_start + actual_delete_count,
917 (len - actual_delete_count - actual_start));
918 FillWithHoles(elms, new_length, len);
920 FixedArray* elms = FixedArray::cast(elms_obj);
921 DisallowHeapAllocation no_gc;
922 heap->MoveElements(elms, actual_start + item_count,
923 actual_start + actual_delete_count,
924 (len - actual_delete_count - actual_start));
925 FillWithHoles(heap, elms, new_length, len);
// Growing: the tail must move up; allocate larger storage if needed.
928 } else if (item_count > actual_delete_count) {
929 FixedArray* elms = FixedArray::cast(elms_obj);
930 // Currently fixed arrays cannot grow too big, so
931 // we should never hit this case.
932 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
934 // Check if array need to grow.
935 if (new_length > elms->length()) {
936 // New backing storage is needed.
937 int capacity = new_length + (new_length >> 1) + 16;
938 FixedArray* new_elms;
939 MaybeObject* maybe_obj = heap->AllocateUninitializedFixedArray(capacity);
940 if (!maybe_obj->To(&new_elms)) return maybe_obj;
942 DisallowHeapAllocation no_gc;
944 ElementsKind kind = array->GetElementsKind();
945 ElementsAccessor* accessor = array->GetElementsAccessor();
946 if (actual_start > 0) {
947 // Copy the part before actual_start as is.
948 MaybeObject* maybe_failure = accessor->CopyElements(
949 NULL, 0, kind, new_elms, 0, actual_start, elms);
950 ASSERT(!maybe_failure->IsFailure());
// Copy the tail shifted up by item_count, hole-filling the rest.
953 MaybeObject* maybe_failure = accessor->CopyElements(
954 NULL, actual_start + actual_delete_count, kind, new_elms,
955 actual_start + item_count,
956 ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
957 ASSERT(!maybe_failure->IsFailure());
963 DisallowHeapAllocation no_gc;
964 heap->MoveElements(elms, actual_start + item_count,
965 actual_start + actual_delete_count,
966 (len - actual_delete_count - actual_start));
// Write the inserted items into the gap.
970 if (IsFastDoubleElementsKind(elements_kind)) {
971 FixedDoubleArray* elms = FixedDoubleArray::cast(elms_obj);
972 for (int k = actual_start; k < actual_start + item_count; k++) {
973 Object* arg = args[3 + k - actual_start];
975 elms->set(k, Smi::cast(arg)->value());
977 elms->set(k, HeapNumber::cast(arg)->value());
981 FixedArray* elms = FixedArray::cast(elms_obj);
982 DisallowHeapAllocation no_gc;
983 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
984 for (int k = actual_start; k < actual_start + item_count; k++) {
985 elms->set(k, args[3 + k - actual_start], mode);
990 array->set_elements(elms_obj);
993 array->set_length(Smi::FromInt(new_length));
// Array.prototype.concat fast path: all arguments must be fast-elements
// JSArrays whose prototype is the pristine Array.prototype.  Computes the
// widest needed elements kind, allocates the result once, then bulk
// copies each argument's elements.
999 BUILTIN(ArrayConcat) {
1000 Heap* heap = isolate->heap();
1001 Context* native_context = isolate->context()->native_context();
1002 JSObject* array_proto =
1003 JSObject::cast(native_context->array_function()->prototype());
1004 if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
1005 return CallJsBuiltin(isolate, "ArrayConcat", args);
1008 // Iterate through all the arguments performing checks
1009 // and calculating total length.
1010 int n_arguments = args.length();
1012 ElementsKind elements_kind = GetInitialFastElementsKind();
1013 bool has_double = false;
1014 bool is_holey = false;
1015 for (int i = 0; i < n_arguments; i++) {
1016 Object* arg = args[i];
1017 if (!arg->IsJSArray() ||
1018 !JSArray::cast(arg)->HasFastElements() ||
1019 JSArray::cast(arg)->GetPrototype() != array_proto) {
1020 return CallJsBuiltin(isolate, "ArrayConcat", args);
1022 int len = Smi::cast(JSArray::cast(arg)->length())->value();
1024 // We shouldn't overflow when adding another len.
1025 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
1026 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
1029 ASSERT(result_len >= 0);
1031 if (result_len > FixedDoubleArray::kMaxLength) {
1032 return CallJsBuiltin(isolate, "ArrayConcat", args);
// Track the most general elements kind seen across all arguments.
1035 ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
1036 has_double = has_double || IsFastDoubleElementsKind(arg_kind);
1037 is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
1038 if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
1039 elements_kind = arg_kind;
1043 if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
1045 // If a double array is concatted into a fast elements array, the fast
1046 // elements array needs to be initialized to contain proper holes, since
1047 // boxing doubles may cause incremental marking.
1048 ArrayStorageAllocationMode mode =
1049 has_double && IsFastObjectElementsKind(elements_kind)
1050 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
1051 JSArray* result_array;
1053 MaybeObject* maybe_array =
1054 heap->AllocateJSArrayAndStorage(elements_kind,
1058 if (!maybe_array->To(&result_array)) return maybe_array;
1059 if (result_len == 0) return result_array;
// Second pass: copy every argument's elements into the result storage.
1062 FixedArrayBase* storage = result_array->elements();
1063 ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
1064 for (int i = 0; i < n_arguments; i++) {
1065 JSArray* array = JSArray::cast(args[i]);
1066 int len = Smi::cast(array->length())->value();
1067 ElementsKind from_kind = array->GetElementsKind();
1069 MaybeObject* maybe_failure =
1070 accessor->CopyElements(array, 0, from_kind, storage, j, len);
1071 if (maybe_failure->IsFailure()) return maybe_failure;
1076 ASSERT(j == result_len);
1078 return result_array;
1082 // -----------------------------------------------------------------------------
1083 // Strict mode poison pills
// Accessor installed where strict-mode code must not peek (e.g. poisoned
// function properties); unconditionally throws a TypeError.
1086 BUILTIN(StrictModePoisonPill) {
1087 HandleScope scope(isolate);
1088 return isolate->Throw(*isolate->factory()->NewTypeError(
1089 "strict_poison_pill", HandleVector<Object>(NULL, 0)));
1093 // -----------------------------------------------------------------------------
1097 // Searches the hidden prototype chain of the given object for the first
1098 // object that is an instance of the given type. If no such object can
1099 // be found then Heap::null_value() is returned.
// NOTE(review): the middle parameter line (the object being searched) is
// elided from this listing; the body refers to it as `object`.
1100 static inline Object* FindHidden(Heap* heap,
1102 FunctionTemplateInfo* type) {
1103 if (type->IsTemplateFor(object)) return object;
// Recurse only through *hidden* prototypes; a normal prototype ends the
// search with null_value (the caller treats that as "no match").
1104 Object* proto = object->GetPrototype(heap->isolate());
1105 if (proto->IsJSObject() &&
1106 JSObject::cast(proto)->map()->is_hidden_prototype()) {
1107 return FindHidden(heap, proto, type);
1109 return heap->null_value();
1113 // Returns the holder JSObject if the function can legally be called
1114 // with this receiver. Returns Heap::null_value() if the call is
1115 // illegal. Any arguments that don't fit the expected type are
1116 // overwritten with undefined. Note that holder and the arguments are
1117 // implicitly rewritten with the first object in the hidden prototype
1118 // chain that actually has the expected type.
// NOTE(review): the argc/argv parameter lines are elided from this
// listing; argv[0] is the receiver and the arguments are addressed with
// negative offsets from it (argv[-1 - i]) — presumably because the
// argument array grows downwards; confirm against Arguments layout.
1119 static inline Object* TypeCheck(Heap* heap,
1122 FunctionTemplateInfo* info) {
1123 Object* recv = argv[0];
1124 // API calls are only supported with JSObject receivers.
1125 if (!recv->IsJSObject()) return heap->null_value();
// No signature means no receiver/argument restrictions at all.
1126 Object* sig_obj = info->signature();
1127 if (sig_obj->IsUndefined()) return recv;
1128 SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1129 // If necessary, check the receiver
1130 Object* recv_type = sig->receiver();
1131 Object* holder = recv;
1132 if (!recv_type->IsUndefined()) {
1133 holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
1134 if (holder == heap->null_value()) return heap->null_value();
1136 Object* args_obj = sig->args();
1137 // If there is no argument signature we're done
1138 if (args_obj->IsUndefined()) return holder;
1139 FixedArray* args = FixedArray::cast(args_obj);
1140 int length = args->length();
// Check at most the arguments actually passed (argv[0] is the receiver,
// hence argc - 1 real arguments).
1141 if (argc <= length) length = argc - 1;
1142 for (int i = 0; i < length; i++) {
1143 Object* argtype = args->get(i);
1144 if (argtype->IsUndefined()) continue;
// Rewrite the argument in place: either to the matching object in its
// hidden prototype chain, or to undefined if nothing matches.
1145 Object** arg = &argv[-1 - i];
1146 Object* current = *arg;
1147 current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
1148 if (current == heap->null_value()) current = heap->undefined_value();
// Common implementation for calling an API (FunctionTemplate-backed)
// function, for both normal calls and construct calls.  Type-checks the
// receiver/arguments against the function's signature, invokes the C++
// callback if one is installed, and enforces the construct-call rule
// that a non-JSObject result is replaced by the receiver.
1155 template <bool is_construct>
1156 MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
1157 BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1158 ASSERT(is_construct == CalledAsConstructor(isolate));
1159 Heap* heap = isolate->heap();
1161 HandleScope scope(isolate);
1162 Handle<JSFunction> function = args.called_function();
1163 ASSERT(function->shared()->IsApiFunction());
1165 FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
// Lazily configure the instance from its template; propagate any
// exception raised during configuration.
1167 Handle<FunctionTemplateInfo> desc(fun_data, isolate);
1168 bool pending_exception = false;
1169 isolate->factory()->ConfigureInstance(
1170 desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
1171 ASSERT(isolate->has_pending_exception() == pending_exception);
1172 if (pending_exception) return Failure::Exception();
// Classic-mode non-native functions see the global receiver instead of
// undefined (sloppy-mode receiver substitution).
1176 SharedFunctionInfo* shared = function->shared();
1177 if (shared->is_classic_mode() && !shared->native()) {
1178 Object* recv = args[0];
1179 ASSERT(!recv->IsNull());
1180 if (recv->IsUndefined()) {
1181 args[0] = function->context()->global_object()->global_receiver();
// Null holder means the signature check failed: receiver is illegal.
1185 Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
1187 if (raw_holder->IsNull()) {
1188 // This function cannot be called with the given receiver. Abort!
1189 Handle<Object> obj =
1190 isolate->factory()->NewTypeError(
1191 "illegal_invocation", HandleVector(&function, 1));
1192 return isolate->Throw(*obj);
// Invoke the C++ callback, if the template has one installed.
1195 Object* raw_call_data = fun_data->call_code();
1196 if (!raw_call_data->IsUndefined()) {
1197 CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1198 Object* callback_obj = call_data->callback();
1199 v8::FunctionCallback callback =
1200 v8::ToCData<v8::FunctionCallback>(callback_obj);
1201 Object* data_obj = call_data->data();
1204 LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1205 ASSERT(raw_holder->IsJSObject());
1207 FunctionCallbackArguments custom(isolate,
// An empty callback result maps to undefined; otherwise unwrap the
// v8::Value handle back to an internal Object*.
1215 v8::Handle<v8::Value> value = custom.Call(callback);
1216 if (value.IsEmpty()) {
1217 result = heap->undefined_value();
1219 result = *reinterpret_cast<Object**>(*value);
1220 result->VerifyApiCallResultType();
1223 RETURN_IF_SCHEDULED_EXCEPTION(isolate);
// Construct calls must return an object; otherwise the receiver wins.
1224 if (!is_construct || result->IsJSObject()) return result;
1227 return *args.receiver();
// Entry point for a normal (non-construct) call to an API function.
1231 BUILTIN(HandleApiCall) {
1232 return HandleApiCallHelper<false>(args, isolate);
// Entry point for a construct ("new") call to an API function.
1236 BUILTIN(HandleApiCallConstruct) {
1237 return HandleApiCallHelper<true>(args, isolate);
1241 // Helper function to handle calls to non-function objects created through the
1242 // API. The object can be called as either a constructor (using new) or just as
1243 // a function (without new).
1244 MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
1246 bool is_construct_call,
1247 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1248 // Non-functions are never called as constructors. Even if this is an object
1249 // called as a constructor the delegate call is not a construct call.
1250 ASSERT(!CalledAsConstructor(isolate));
1251 Heap* heap = isolate->heap();
1253 Handle<Object> receiver = args.receiver();
1255 // Get the object called.
1256 JSObject* obj = JSObject::cast(*receiver);
1258 // Get the invocation callback from the function descriptor that was
1259 // used to create the called object.
1260 ASSERT(obj->map()->has_instance_call_handler());
1261 JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1262 ASSERT(constructor->shared()->IsApiFunction());
1264 constructor->shared()->get_api_func_data()->instance_call_handler();
1265 ASSERT(!handler->IsUndefined());
1266 CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1267 Object* callback_obj = call_data->callback();
1268 v8::FunctionCallback callback =
1269 v8::ToCData<v8::FunctionCallback>(callback_obj);
1271 // Get the data for the call and perform the callback.
1274 HandleScope scope(isolate);
1275 LOG(isolate, ApiObjectAccess("call non-function", obj));
1277 FunctionCallbackArguments custom(isolate,
// As in HandleApiCallHelper: empty handle -> undefined, otherwise
// unwrap the returned v8::Value into an internal Object*.
1284 v8::Handle<v8::Value> value = custom.Call(callback);
1285 if (value.IsEmpty()) {
1286 result = heap->undefined_value();
1288 result = *reinterpret_cast<Object**>(*value);
1289 result->VerifyApiCallResultType();
1292 // Check for exceptions and return result.
1293 RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1298 // Handle calls to non-function objects created through the API. This delegate
1299 // function is used when the call is a normal function call.
1300 BUILTIN(HandleApiCallAsFunction) {
1301 return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1305 // Handle calls to non-function objects created through the API. This delegate
1306 // function is used when the call is a construct call.
1307 BUILTIN(HandleApiCallAsConstructor) {
1308 return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
// -----------------------------------------------------------------------------
// Load / keyed-load IC builtin generators.  Each of these is a thin
// trampoline that emits the corresponding stub via the IC class's static
// code generator; they are wired into the builtins table by
// InitBuiltinFunctionTable below.
1312 static void Generate_LoadIC_Miss(MacroAssembler* masm) {
1313 LoadIC::GenerateMiss(masm);
1317 static void Generate_LoadIC_Normal(MacroAssembler* masm) {
1318 LoadIC::GenerateNormal(masm);
// Variant used when deoptimizing through an inlined getter call.
1322 static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
1323 LoadStubCompiler::GenerateLoadViaGetter(
1324 masm, Handle<HeapType>::null(),
1325 LoadStubCompiler::registers()[0], Handle<JSFunction>());
1329 static void Generate_LoadIC_Slow(MacroAssembler* masm) {
1330 LoadIC::GenerateRuntimeGetProperty(masm);
1334 static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
1335 KeyedLoadIC::GenerateInitialize(masm);
1339 static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
1340 KeyedLoadIC::GenerateRuntimeGetProperty(masm);
1344 static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
1345 KeyedLoadIC::GenerateMiss(masm);
1349 static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
1350 KeyedLoadIC::GenerateGeneric(masm);
1354 static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
1355 KeyedLoadIC::GenerateString(masm);
1359 static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
1360 KeyedLoadIC::GeneratePreMonomorphic(masm);
1364 static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
1365 KeyedLoadIC::GenerateIndexedInterceptor(masm);
1369 static void Generate_KeyedLoadIC_NonStrictArguments(MacroAssembler* masm) {
1370 KeyedLoadIC::GenerateNonStrictArguments(masm);
// Store / keyed-store IC builtin generators.  Same trampoline pattern as
// the load ICs above; the keyed-store variants come in non-strict and
// strict flavors (strictness is passed to, or encoded by, the generator).
1374 static void Generate_StoreIC_Slow(MacroAssembler* masm) {
1375 StoreIC::GenerateSlow(masm);
1379 static void Generate_StoreIC_Miss(MacroAssembler* masm) {
1380 StoreIC::GenerateMiss(masm);
1384 static void Generate_StoreIC_Normal(MacroAssembler* masm) {
1385 StoreIC::GenerateNormal(masm);
// Variant used when deoptimizing through an inlined setter call.
1389 static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
1390 StoreStubCompiler::GenerateStoreViaSetter(
1391 masm, Handle<HeapType>::null(), Handle<JSFunction>());
1395 static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
1396 KeyedStoreIC::GenerateGeneric(masm, kNonStrictMode);
1400 static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
1401 KeyedStoreIC::GenerateGeneric(masm, kStrictMode);
1405 static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
1406 KeyedStoreIC::GenerateMiss(masm);
1410 static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
1411 KeyedStoreIC::GenerateSlow(masm);
1415 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
1416 KeyedStoreIC::GenerateInitialize(masm);
1420 static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
1421 KeyedStoreIC::GenerateInitialize(masm);
1425 static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
1426 KeyedStoreIC::GeneratePreMonomorphic(masm);
1430 static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
1431 KeyedStoreIC::GeneratePreMonomorphic(masm);
1435 static void Generate_KeyedStoreIC_NonStrictArguments(MacroAssembler* masm) {
1436 KeyedStoreIC::GenerateNonStrictArguments(masm);
// Debugger builtin generators: stubs the debugger patches in at break
// locations and for LiveEdit frame manipulation.  Only compiled when
// debugger support is enabled.
1440 #ifdef ENABLE_DEBUGGER_SUPPORT
1441 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
1442 Debug::GenerateLoadICDebugBreak(masm);
1446 static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
1447 Debug::GenerateStoreICDebugBreak(masm);
1451 static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
1452 Debug::GenerateKeyedLoadICDebugBreak(masm);
1456 static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
1457 Debug::GenerateKeyedStoreICDebugBreak(masm);
1461 static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
1462 Debug::GenerateCompareNilICDebugBreak(masm);
1466 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
1467 Debug::GenerateReturnDebugBreak(masm);
1471 static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
1472 Debug::GenerateCallFunctionStubDebugBreak(masm);
1476 static void Generate_CallFunctionStub_Recording_DebugBreak(
1477 MacroAssembler* masm) {
1478 Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
1482 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
1483 Debug::GenerateCallConstructStubDebugBreak(masm);
1487 static void Generate_CallConstructStub_Recording_DebugBreak(
1488 MacroAssembler* masm) {
1489 Debug::GenerateCallConstructStubRecordDebugBreak(masm);
1493 static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
1494 Debug::GenerateSlotDebugBreak(masm);
1498 static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
1499 Debug::GeneratePlainReturnLiveEdit(masm);
1503 static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
1504 Debug::GenerateFrameDropperLiveEdit(masm);
// Constructor: start uninitialized with zeroed code and name tables; the
// real work happens later in SetUp().
1509 Builtins::Builtins() : initialized_(false) {
1510 memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
1511 memset(names_, 0, sizeof(names_[0]) * builtin_count);
// Destructor body is empty (elided here); tear-down is explicit via TearDown().
1515 Builtins::~Builtins() {
// Table of the C++ entry points for every C builtin, generated from the
// BUILTIN_LIST_C macro list.
1519 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
1520 Address const Builtins::c_functions_[cfunction_count] = {
1521 BUILTIN_LIST_C(DEF_ENUM_C)
// Parallel tables of names and expected argument counts for the builtins
// implemented in JavaScript, generated from BUILTINS_LIST_JS.
1525 #define DEF_JS_NAME(name, ignore) #name,
1526 #define DEF_JS_ARGC(ignore, argc) argc,
1527 const char* const Builtins::javascript_names_[id_count] = {
1528 BUILTINS_LIST_JS(DEF_JS_NAME)
1531 int const Builtins::javascript_argc_[id_count] = {
1532 BUILTINS_LIST_JS(DEF_JS_ARGC)
// Descriptor for one builtin: generator/C entry points, name, code flags
// and extra-argument convention.  (Several member declarations are elided
// from this listing.)
1537 struct BuiltinDesc {
1540 const char* s_name; // name is only used for generating log information.
1543 BuiltinExtraArguments extra_args;
1546 #define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
// Lazily-initialized table of BuiltinDescs; CallOnce guarantees the table
// is populated exactly once even with concurrent first access.
1548 class BuiltinFunctionTable {
1550 BuiltinDesc* functions() {
1551 CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
// One extra sentinel slot past builtin_count (filled with NULLs in
// InitBuiltinFunctionTable).
1556 BuiltinDesc functions_[Builtins::builtin_count + 1];
1558 friend class Builtins;
1561 static BuiltinFunctionTable builtin_function_table =
1562 BUILTIN_FUNCTION_TABLE_INIT;
1564 // Define array of pointers to generators and C builtin functions.
1565 // We do this in a sort of roundabout way so that we can do the initialization
1566 // within the lexical scope of Builtins:: and within a context where
1567 // Code::Flags names a non-abstract type.
1568 void Builtins::InitBuiltinFunctionTable() {
1569 BuiltinDesc* functions = builtin_function_table.functions_;
// Fill the sentinel slot at the end of the table.
1570 functions[builtin_count].generator = NULL;
1571 functions[builtin_count].c_code = NULL;
1572 functions[builtin_count].s_name = NULL;
1573 functions[builtin_count].name = builtin_count;
1574 functions[builtin_count].flags = static_cast<Code::Flags>(0);
1575 functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
// C builtins: code is generated by the shared adaptor, which then calls
// into the C++ function.  (The `functions++` advance at the end of each
// macro body is elided from this listing.)
1577 #define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1578 functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1579 functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1580 functions->s_name = #aname; \
1581 functions->name = c_##aname; \
1582 functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1583 functions->extra_args = aextra_args; \
// Assembler builtins: code comes entirely from a Generate_* function.
1586 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1587 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1588 functions->c_code = NULL; \
1589 functions->s_name = #aname; \
1590 functions->name = k##aname; \
1591 functions->flags = Code::ComputeFlags(Code::kind, \
1594 functions->extra_args = NO_EXTRA_ARGUMENTS; \
// Handler builtins: like assembler builtins but with HANDLER code flags.
1597 #define DEF_FUNCTION_PTR_H(aname, kind) \
1598 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1599 functions->c_code = NULL; \
1600 functions->s_name = #aname; \
1601 functions->name = k##aname; \
1602 functions->flags = Code::ComputeFlags( \
1603 Code::HANDLER, MONOMORPHIC, kNoExtraICState, \
1604 Code::NORMAL, Code::kind); \
1605 functions->extra_args = NO_EXTRA_ARGUMENTS; \
1608 BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1609 BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1610 BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
1611 BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1613 #undef DEF_FUNCTION_PTR_C
1614 #undef DEF_FUNCTION_PTR_A
// Generates (or, when deserializing, reserves slots for) the code object
// of every builtin and records it in builtins_/names_.  With
// create_heap_objects == false the code pointers are filled in later by
// IterateBuiltins during deserialization.
1618 void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
1619 ASSERT(!initialized_);
1620 Heap* heap = isolate->heap();
1622 // Create a scope for the handles in the builtins.
1623 HandleScope scope(isolate);
// First access triggers InitBuiltinFunctionTable via CallOnce.
1625 const BuiltinDesc* functions = builtin_function_table.functions();
1627 // For now we generate builtin adaptor code into a stack-allocated
1628 // buffer, before copying it into individual code objects. Be careful
1629 // with alignment, some platforms don't like unaligned code.
1630 union { int force_alignment; byte buffer[8*KB]; } u;
1632 // Traverse the list of builtins and generate an adaptor in a
1633 // separate code object for each one.
1634 for (int i = 0; i < builtin_count; i++) {
1635 if (create_heap_objects) {
1636 MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
1637 // Generate the code/adaptor.
1638 typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
1639 Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
1640 // We pass all arguments to the generator, but it may not use all of
1641 // them. This works because the first arguments are on top of the
1643 ASSERT(!masm.has_frame());
1644 g(&masm, functions[i].name, functions[i].extra_args);
1645 // Move the code into the object heap.
1647 masm.GetCode(&desc);
1648 Code::Flags flags = functions[i].flags;
1649 Object* code = NULL;
1651 // During startup it's OK to always allocate and defer GC to later.
1652 // This simplifies things because we don't need to retry.
1653 AlwaysAllocateScope __scope__;
1654 { MaybeObject* maybe_code =
1655 heap->CreateCode(desc, flags, masm.CodeObject());
// Allocation failure this early is unrecoverable: abort the process.
1656 if (!maybe_code->ToObject(&code)) {
1657 v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
1661 // Log the event and add the code to the builtins array.
1663 CodeCreateEvent(Logger::BUILTIN_TAG,
1665 functions[i].s_name));
1666 GDBJIT(AddCode(GDBJITInterface::BUILTIN,
1667 functions[i].s_name,
1669 builtins_[i] = code;
1670 #ifdef ENABLE_DISASSEMBLER
1671 if (FLAG_print_builtin_code) {
1672 CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
1673 PrintF(trace_scope.file(), "Builtin: %s\n", functions[i].s_name);
1674 Code::cast(code)->Disassemble(functions[i].s_name, trace_scope.file());
1675 PrintF(trace_scope.file(), "\n");
1679 // Deserializing. The values will be filled in during IterateBuiltins.
1680 builtins_[i] = NULL;
1682 names_[i] = functions[i].s_name;
1685 // Mark as initialized.
1686 initialized_ = true;
// Marks the builtins as torn down; the code objects themselves are owned
// by the heap.
1690 void Builtins::TearDown() {
1691 initialized_ = false;
// GC/serializer hook: visit every builtin code pointer so they are kept
// alive and relocated.
1695 void Builtins::IterateBuiltins(ObjectVisitor* v) {
1696 v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
// Returns the name of the builtin whose code contains pc (return
// statements are elided from this listing).
1700 const char* Builtins::Lookup(byte* pc) {
1701 // may be called during initialization (disassembler!)
1703 for (int i = 0; i < builtin_count; i++) {
1704 Code* entry = Code::cast(builtins_[i]);
1705 if (entry->contains(pc)) {
// Builtin that tail-calls into the runtime to service a pending interrupt.
1714 void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
1715 masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
// Builtin that tail-calls into the runtime for a stack-guard check.
1719 void Builtins::Generate_StackCheck(MacroAssembler* masm) {
1720 masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
// Accessors Builtins::<name>() returning a Handle<Code> for each builtin.
// The handle is "location-based": it wraps the address of the slot in
// builtins_, so it stays valid across GC moving the code object.
1724 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
1725 Handle<Code> Builtins::name() { \
1726 Code** code_address = \
1727 reinterpret_cast<Code**>(builtin_address(k##name)); \
1728 return Handle<Code>(code_address); \
1730 #define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
1731 Handle<Code> Builtins::name() { \
1732 Code** code_address = \
1733 reinterpret_cast<Code**>(builtin_address(k##name)); \
1734 return Handle<Code>(code_address); \
1736 #define DEFINE_BUILTIN_ACCESSOR_H(name, kind) \
1737 Handle<Code> Builtins::name() { \
1738 Code** code_address = \
1739 reinterpret_cast<Code**>(builtin_address(k##name)); \
1740 return Handle<Code>(code_address); \
1742 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
1743 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
1744 BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
1745 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
1746 #undef DEFINE_BUILTIN_ACCESSOR_C
1747 #undef DEFINE_BUILTIN_ACCESSOR_A
1750 } } // namespace v8::internal