1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/api-natives.h"
9 #include "src/arguments.h"
10 #include "src/base/once.h"
11 #include "src/bootstrapper.h"
12 #include "src/builtins.h"
13 #include "src/cpu-profiler.h"
14 #include "src/gdb-jit.h"
15 #include "src/heap/mark-compact.h"
16 #include "src/heap-profiler.h"
17 #include "src/ic/handler-compiler.h"
18 #include "src/ic/ic.h"
19 #include "src/prototype.h"
20 #include "src/vm-state-inl.h"
27 // Arguments object passed to C++ builtins.
// NOTE(review): this listing is elided -- access specifiers and several
// closing braces fall on lines that are not visible here.
28 template <BuiltinExtraArguments extra_args>
29 class BuiltinArguments : public Arguments {
// Wraps the raw (length, arguments) pair handed over by the builtin stub.
31 BuiltinArguments(int length, Object** arguments)
32 : Arguments(length, arguments) { }
// Bounds-checked raw access to argument slot |index|.
34 Object*& operator[] (int index) {
35 DCHECK(index < length());
36 return Arguments::operator[](index);
// Bounds-checked, typed handle access to argument slot |index|.
39 template <class S> Handle<S> at(int index) {
40 DCHECK(index < length());
41 return Arguments::at<S>(index);
// The receiver always occupies argument slot 0.
44 Handle<Object> receiver() {
45 return Arguments::at<Object>(0);
// The called function travels as the trailing extra argument; only valid
// for the NEEDS_CALLED_FUNCTION instantiation (enforced below).
48 Handle<JSFunction> called_function() {
49 STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
50 return Arguments::at<JSFunction>(Arguments::length() - 1);
53 // Gets the total number of arguments including the receiver (but
54 // excluding extra arguments).
56 STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
57 return Arguments::length();
62 // Check we have at least the receiver.
63 DCHECK(Arguments::length() >= 1);
69 // Specialize BuiltinArguments for the called function extra argument.
// length() hides the trailing called-function slot from callers.
72 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
73 return Arguments::length() - 1;
// Debug-only consistency check for the NEEDS_CALLED_FUNCTION layout.
78 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
79 // Check we have at least the receiver and the called function.
80 DCHECK(Arguments::length() >= 2);
81 // Make sure cast to JSFunction succeeds.
// For every C++ builtin in BUILTIN_LIST_C, typedef the matching
// BuiltinArguments instantiation (e.g. ArrayPushArgumentsType).
87 #define DEF_ARG_TYPE(name, spec) \
88 typedef BuiltinArguments<spec> name##ArgumentsType;
89 BUILTIN_LIST_C(DEF_ARG_TYPE)
94 // ----------------------------------------------------------------------------
95 // Support macro for defining builtins in C++.
96 // ----------------------------------------------------------------------------
98 // A builtin function is defined by writing:
104 // In the body of the builtin function the arguments can be accessed
105 // through the BuiltinArguments object args.
// Debug variant: Builtin_##name wraps the raw argument vector in the typed
// BuiltinArguments object before delegating to Builtin_Impl_##name.
109 #define BUILTIN(name) \
110 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
111 name##ArgumentsType args, Isolate* isolate); \
112 MUST_USE_RESULT static Object* Builtin_##name( \
113 int args_length, Object** args_object, Isolate* isolate) { \
114 name##ArgumentsType args(args_length, args_object); \
116 return Builtin_Impl_##name(args, isolate); \
118 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
119 name##ArgumentsType args, Isolate* isolate)
121 #else // For release mode.
// Release variant: same forwarding shape without the debug-only checks.
123 #define BUILTIN(name) \
124 static Object* Builtin_impl##name( \
125 name##ArgumentsType args, Isolate* isolate); \
126 static Object* Builtin_##name( \
127 int args_length, Object** args_object, Isolate* isolate) { \
128 name##ArgumentsType args(args_length, args_object); \
129 return Builtin_impl##name(args, isolate); \
131 static Object* Builtin_impl##name( \
132 name##ArgumentsType args, Isolate* isolate)
// Debug helper: computes "was this builtin called as a constructor?" two
// independent ways -- via a full stack-frame iterator and via direct
// inspection of the caller frame's marker word -- and DCHECKs they agree.
137 static inline bool CalledAsConstructor(Isolate* isolate) {
138 // Calculate the result using a full stack frame iterator and check
139 // that the state of the stack is as we assume it to be in the
141 StackFrameIterator it(isolate);
142 DCHECK(it.frame()->is_exit());
144 StackFrame* frame = it.frame();
145 bool reference_result = frame->is_construct();
146 Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
147 // Because we know fp points to an exit frame we can use the relevant
148 // part of ExitFrame::ComputeCallerState directly.
149 const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
150 Address caller_fp = Memory::Address_at(fp + kCallerOffset);
151 // This inlines the part of StackFrame::ComputeType that grabs the
152 // type of the current frame. Note that StackFrame::ComputeType
153 // has been specialized for each architecture so if any one of them
154 // changes this code has to be changed as well.
155 const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
156 const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
157 Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
158 bool result = (marker == kConstructMarker);
// The cheap marker check must agree with the frame-iterator answer.
159 DCHECK_EQ(result, reference_result);
165 // ----------------------------------------------------------------------------
169 return isolate->heap()->undefined_value(); // Make compiler happy.
// Builtin that does nothing and returns undefined.
173 BUILTIN(EmptyFunction) {
174 return isolate->heap()->undefined_value();
// Bulk-moves |len| doubles between FixedDoubleArray backing stores.
// Uses MemMove, so overlapping src/dst ranges are handled correctly.
178 static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
179 FixedDoubleArray* src, int src_index, int len) {
180 if (len == 0) return;
181 MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
// Walks the prototype chain via |iter| and fails as soon as a proxy or an
// object with a non-empty elements backing store is found, i.e. returns
// true only when no prototype can contribute indexed elements.
186 static bool ArrayPrototypeHasNoElements(Heap* heap, PrototypeIterator* iter) {
187 DisallowHeapAllocation no_gc;
188 for (; !iter->IsAtEnd(); iter->Advance()) {
189 if (iter->GetCurrent()->IsJSProxy()) return false;
190 if (JSObject::cast(iter->GetCurrent())->elements() !=
191 heap->empty_fixed_array()) {
// True when elements of the receiver may be moved/removed without
// consulting the prototype chain (no chain object holds elements).
199 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
201 DisallowHeapAllocation no_gc;
202 PrototypeIterator iter(heap->isolate(), receiver);
203 return ArrayPrototypeHasNoElements(heap, &iter);
207 // Returns empty handle if not applicable.
// Fast-path gatekeeper for the C++ array builtins: returns the (possibly
// freshly made writable) elements backing store when |receiver| is a fast,
// extensible, non-observed JSArray, and the empty handle otherwise so the
// caller can fall back to the JS implementation. When |args| is supplied,
// the values starting at |first_added_arg| may force an elements-kind
// transition (smi -> double -> object) before the store is returned.
209 static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
211 Handle<Object> receiver,
213 int first_added_arg) {
214 if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
215 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
216 // If there may be elements accessors in the prototype chain, the fast path
217 // cannot be used if there are arguments to add to the array.
218 Heap* heap = isolate->heap();
219 if (args != NULL && !IsJSArrayFastElementMovingAllowed(heap, *array)) {
220 return MaybeHandle<FixedArrayBase>();
222 if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
223 if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
224 Handle<FixedArrayBase> elms(array->elements(), isolate);
225 Map* map = elms->map();
226 if (map == heap->fixed_array_map()) {
227 if (args == NULL || array->HasFastObjectElements()) return elms;
228 } else if (map == heap->fixed_cow_array_map()) {
// Copy-on-write store must be copied before mutation is allowed.
229 elms = JSObject::EnsureWritableFastElements(array);
230 if (args == NULL || array->HasFastObjectElements()) return elms;
231 } else if (map == heap->fixed_double_array_map()) {
232 if (args == NULL) return elms;
234 return MaybeHandle<FixedArrayBase>();
237 // Need to ensure that the arguments passed in args can be contained in
239 int args_length = args->length();
240 if (first_added_arg >= args_length) return handle(array->elements(), isolate);
242 ElementsKind origin_kind = array->map()->elements_kind();
243 DCHECK(!IsFastObjectElementsKind(origin_kind));
244 ElementsKind target_kind = origin_kind;
246 DisallowHeapAllocation no_gc;
247 int arg_count = args->length() - first_added_arg;
248 Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
// Widen the target elements kind just enough to hold every added value.
249 for (int i = 0; i < arg_count; i++) {
250 Object* arg = arguments[i];
251 if (arg->IsHeapObject()) {
252 if (arg->IsHeapNumber()) {
253 target_kind = FAST_DOUBLE_ELEMENTS;
255 target_kind = FAST_ELEMENTS;
261 if (target_kind != origin_kind) {
262 JSObject::TransitionElementsKind(array, target_kind);
263 return handle(array->elements(), isolate);
// Slow-path fallback: looks up the named builtin function on the JS
// builtins object and re-dispatches the original call to it, forwarding
// the receiver (args[0]) and all remaining arguments unchanged.
269 MUST_USE_RESULT static Object* CallJsBuiltin(
272 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
273 HandleScope handleScope(isolate);
275 Handle<Object> js_builtin = Object::GetProperty(
277 handle(isolate->native_context()->builtins(), isolate),
278 name).ToHandleChecked();
279 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
280 int argc = args.length() - 1;
281 ScopedVector<Handle<Object> > argv(argc);
282 for (int i = 0; i < argc; ++i) {
283 argv[i] = args.at<Object>(i + 1);
285 Handle<Object> result;
286 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
288 Execution::Call(isolate,
// NOTE(review): the BUILTIN(ArrayPush) header line is not visible in this
// elided listing. The body below is the C++ fast path for
// Array.prototype.push: it appends the arguments to the backing store,
// growing it (capacity = n + n/2 + 16) when needed, and bails out to the
// JS "ArrayPush" builtin whenever the fast path does not apply.
298 HandleScope scope(isolate);
299 Handle<Object> receiver = args.receiver();
300 MaybeHandle<FixedArrayBase> maybe_elms_obj =
301 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
302 Handle<FixedArrayBase> elms_obj;
303 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
304 return CallJsBuiltin(isolate, "ArrayPush", args);
307 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
308 int len = Smi::cast(array->length())->value();
309 int to_add = args.length() - 1;
// A read-only "length" property cannot be grown on the fast path.
310 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
311 return CallJsBuiltin(isolate, "ArrayPush", args);
313 DCHECK(!array->map()->is_observed());
315 ElementsKind kind = array->GetElementsKind();
// Path 1: smi/object elements stored in a FixedArray.
317 if (IsFastSmiOrObjectElementsKind(kind)) {
318 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
320 return Smi::FromInt(len);
322 // Currently fixed arrays cannot grow too big, so
323 // we should never hit this case.
324 DCHECK(to_add <= (Smi::kMaxValue - len));
326 int new_length = len + to_add;
328 if (new_length > elms->length()) {
329 // New backing storage is needed.
330 int capacity = new_length + (new_length >> 1) + 16;
331 Handle<FixedArray> new_elms =
332 isolate->factory()->NewUninitializedFixedArray(capacity);
334 ElementsAccessor* accessor = array->GetElementsAccessor();
335 accessor->CopyElements(
336 elms_obj, 0, kind, new_elms, 0,
337 ElementsAccessor::kCopyToEndAndInitializeToHole);
342 // Add the provided values.
343 DisallowHeapAllocation no_gc;
344 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
345 for (int index = 0; index < to_add; index++) {
346 elms->set(index + len, args[index + 1], mode);
349 if (*elms != array->elements()) {
350 array->set_elements(*elms);
354 array->set_length(Smi::FromInt(new_length));
355 return Smi::FromInt(new_length);
// Path 2: double elements stored in a FixedDoubleArray.
357 int elms_len = elms_obj->length();
359 return Smi::FromInt(len);
361 // Currently fixed arrays cannot grow too big, so
362 // we should never hit this case.
363 DCHECK(to_add <= (Smi::kMaxValue - len));
365 int new_length = len + to_add;
367 Handle<FixedDoubleArray> new_elms;
369 if (new_length > elms_len) {
370 // New backing storage is needed.
371 int capacity = new_length + (new_length >> 1) + 16;
372 // Create new backing store; since capacity > 0, we can
373 // safely cast to FixedDoubleArray.
374 new_elms = Handle<FixedDoubleArray>::cast(
375 isolate->factory()->NewFixedDoubleArray(capacity));
377 ElementsAccessor* accessor = array->GetElementsAccessor();
378 accessor->CopyElements(
379 elms_obj, 0, kind, new_elms, 0,
380 ElementsAccessor::kCopyToEndAndInitializeToHole);
383 // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
384 // empty_fixed_array.
385 new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
388 // Add the provided values.
389 DisallowHeapAllocation no_gc;
391 for (index = 0; index < to_add; index++) {
392 Object* arg = args[index + 1];
393 new_elms->set(index + len, arg->Number());
396 if (*new_elms != array->elements()) {
397 array->set_elements(*new_elms);
401 array->set_length(Smi::FromInt(new_length));
402 return Smi::FromInt(new_length);
// NOTE(review): the BUILTIN(ArrayPop) header line is not visible in this
// elided listing. The body below is the C++ fast path for
// Array.prototype.pop: it reads the last element, shrinks the length by
// one, and falls back to the JS "ArrayPop" builtin for non-fast arrays,
// read-only lengths, or holes (which require a prototype-chain lookup).
408 HandleScope scope(isolate);
409 Handle<Object> receiver = args.receiver();
410 MaybeHandle<FixedArrayBase> maybe_elms_obj =
411 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
412 Handle<FixedArrayBase> elms_obj;
413 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
414 return CallJsBuiltin(isolate, "ArrayPop", args);
417 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
418 DCHECK(!array->map()->is_observed());
// Popping an empty array yields undefined, per spec.
420 int len = Smi::cast(array->length())->value();
421 if (len == 0) return isolate->heap()->undefined_value();
423 if (JSArray::HasReadOnlyLength(array)) {
424 return CallJsBuiltin(isolate, "ArrayPop", args);
427 ElementsAccessor* accessor = array->GetElementsAccessor();
428 int new_length = len - 1;
429 Handle<Object> element =
430 accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
431 if (element->IsTheHole()) {
432 return CallJsBuiltin(isolate, "ArrayPop", args);
434 RETURN_FAILURE_ON_EXCEPTION(
436 accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
// C++ fast path for Array.prototype.shift: removes and returns the first
// element, moving the rest down (or left-trimming the backing store when
// the heap allows it). Falls back to the JS "ArrayShift" builtin when the
// fast path does not apply.
441 BUILTIN(ArrayShift) {
442 HandleScope scope(isolate);
443 Heap* heap = isolate->heap();
444 Handle<Object> receiver = args.receiver();
445 MaybeHandle<FixedArrayBase> maybe_elms_obj =
446 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
447 Handle<FixedArrayBase> elms_obj;
448 if (!maybe_elms_obj.ToHandle(&elms_obj) ||
449 !IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(*receiver))) {
450 return CallJsBuiltin(isolate, "ArrayShift", args);
452 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
453 DCHECK(!array->map()->is_observed());
455 int len = Smi::cast(array->length())->value();
456 if (len == 0) return heap->undefined_value();
458 if (JSArray::HasReadOnlyLength(array)) {
459 return CallJsBuiltin(isolate, "ArrayShift", args);
// A hole at index 0 would need a prototype-chain lookup -- bail out.
463 ElementsAccessor* accessor = array->GetElementsAccessor();
464 Handle<Object> first =
465 accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
466 if (first->IsTheHole()) {
467 return CallJsBuiltin(isolate, "ArrayShift", args);
// Fast path: drop the first slot by moving the object start in memory.
470 if (heap->CanMoveObjectStart(*elms_obj)) {
471 array->set_elements(heap->LeftTrimFixedArray(*elms_obj, 1));
473 // Shift the elements.
474 if (elms_obj->IsFixedArray()) {
475 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
476 DisallowHeapAllocation no_gc;
477 heap->MoveElements(*elms, 0, 1, len - 1);
478 elms->set(len - 1, heap->the_hole_value());
480 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
481 MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
482 elms->set_the_hole(len - 1);
487 array->set_length(Smi::FromInt(len - 1));
// C++ fast path for Array.prototype.unshift: prepends the arguments,
// shifting existing elements up (or allocating a larger store with the
// new values' gap already in place). Falls back to the JS "ArrayUnshift"
// builtin when the fast path does not apply.
493 BUILTIN(ArrayUnshift) {
494 HandleScope scope(isolate);
495 Heap* heap = isolate->heap();
496 Handle<Object> receiver = args.receiver();
497 MaybeHandle<FixedArrayBase> maybe_elms_obj =
498 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
499 Handle<FixedArrayBase> elms_obj;
500 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
501 return CallJsBuiltin(isolate, "ArrayUnshift", args);
503 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
504 DCHECK(!array->map()->is_observed());
// Only smi/object element stores are handled here (not doubles).
505 if (!array->HasFastSmiOrObjectElements()) {
506 return CallJsBuiltin(isolate, "ArrayUnshift", args);
508 int len = Smi::cast(array->length())->value();
509 int to_add = args.length() - 1;
510 int new_length = len + to_add;
511 // Currently fixed arrays cannot grow too big, so
512 // we should never hit this case.
513 DCHECK(to_add <= (Smi::kMaxValue - len));
515 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
516 return CallJsBuiltin(isolate, "ArrayUnshift", args);
519 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
521 if (new_length > elms->length()) {
522 // New backing storage is needed.
523 int capacity = new_length + (new_length >> 1) + 16;
524 Handle<FixedArray> new_elms =
525 isolate->factory()->NewUninitializedFixedArray(capacity);
// Copy the old elements shifted up by |to_add| into the new store.
527 ElementsKind kind = array->GetElementsKind();
528 ElementsAccessor* accessor = array->GetElementsAccessor();
529 accessor->CopyElements(
530 elms, 0, kind, new_elms, to_add,
531 ElementsAccessor::kCopyToEndAndInitializeToHole);
534 array->set_elements(*elms);
536 DisallowHeapAllocation no_gc;
537 heap->MoveElements(*elms, to_add, 0, len);
540 // Add the provided values.
541 DisallowHeapAllocation no_gc;
542 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
543 for (int i = 0; i < to_add; i++) {
544 elms->set(i, args[i + 1], mode);
548 array->set_length(Smi::FromInt(new_length));
549 return Smi::FromInt(new_length);
// C++ fast path for Array.prototype.slice. Handles fast-elements JSArrays
// and sloppy arguments objects; everything else (and any boundary case it
// cannot prove safe) falls back to the JS "ArraySlice" builtin.
553 BUILTIN(ArraySlice) {
554 HandleScope scope(isolate);
555 Heap* heap = isolate->heap();
556 Handle<Object> receiver = args.receiver();
558 int relative_start = 0;
559 int relative_end = 0;
561 DisallowHeapAllocation no_gc;
562 if (receiver->IsJSArray()) {
563 JSArray* array = JSArray::cast(*receiver);
564 if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
565 AllowHeapAllocation allow_allocation;
566 return CallJsBuiltin(isolate, "ArraySlice", args);
569 if (!array->HasFastElements()) {
570 AllowHeapAllocation allow_allocation;
571 return CallJsBuiltin(isolate, "ArraySlice", args);
574 len = Smi::cast(array->length())->value();
576 // Array.slice(arguments, ...) is quite a common idiom (notably more
577 // than 50% of invocations in Web apps). Treat it in C++ as well.
579 isolate->context()->native_context()->sloppy_arguments_map();
581 bool is_arguments_object_with_fast_elements =
582 receiver->IsJSObject() &&
583 JSObject::cast(*receiver)->map() == arguments_map;
584 if (!is_arguments_object_with_fast_elements) {
585 AllowHeapAllocation allow_allocation;
586 return CallJsBuiltin(isolate, "ArraySlice", args);
588 JSObject* object = JSObject::cast(*receiver);
590 if (!object->HasFastElements()) {
591 AllowHeapAllocation allow_allocation;
592 return CallJsBuiltin(isolate, "ArraySlice", args);
// The arguments object's length lives in an in-object property and must
// be a smi no larger than the backing store for the fast path to apply.
595 Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
596 if (!len_obj->IsSmi()) {
597 AllowHeapAllocation allow_allocation;
598 return CallJsBuiltin(isolate, "ArraySlice", args);
600 len = Smi::cast(len_obj)->value();
601 if (len > object->elements()->length()) {
602 AllowHeapAllocation allow_allocation;
603 return CallJsBuiltin(isolate, "ArraySlice", args);
608 int n_arguments = args.length() - 1;
610 // Note carefully chosen defaults---if argument is missing,
611 // it's undefined which gets converted to 0 for relative_start
612 // and to len for relative_end.
615 if (n_arguments > 0) {
616 Object* arg1 = args[1];
618 relative_start = Smi::cast(arg1)->value();
619 } else if (arg1->IsHeapNumber()) {
620 double start = HeapNumber::cast(arg1)->value();
621 if (start < kMinInt || start > kMaxInt) {
622 AllowHeapAllocation allow_allocation;
623 return CallJsBuiltin(isolate, "ArraySlice", args);
625 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
626 } else if (!arg1->IsUndefined()) {
627 AllowHeapAllocation allow_allocation;
628 return CallJsBuiltin(isolate, "ArraySlice", args);
630 if (n_arguments > 1) {
631 Object* arg2 = args[2];
633 relative_end = Smi::cast(arg2)->value();
634 } else if (arg2->IsHeapNumber()) {
635 double end = HeapNumber::cast(arg2)->value();
636 if (end < kMinInt || end > kMaxInt) {
637 AllowHeapAllocation allow_allocation;
638 return CallJsBuiltin(isolate, "ArraySlice", args);
640 relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
641 } else if (!arg2->IsUndefined()) {
642 AllowHeapAllocation allow_allocation;
643 return CallJsBuiltin(isolate, "ArraySlice", args);
649 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
650 int k = (relative_start < 0) ? Max(len + relative_start, 0)
651 : Min(relative_start, len);
653 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
654 int final = (relative_end < 0) ? Max(len + relative_end, 0)
655 : Min(relative_end, len);
657 // Calculate the length of result array.
658 int result_len = Max(final - k, 0);
660 Handle<JSObject> object = Handle<JSObject>::cast(receiver);
661 Handle<FixedArrayBase> elms(object->elements(), isolate);
663 ElementsKind kind = object->GetElementsKind();
// For holey kinds, verify every element in [k, final) is present; if so
// the result can be packed. A hole would require prototype lookups.
664 if (IsHoleyElementsKind(kind)) {
665 DisallowHeapAllocation no_gc;
667 ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
668 for (int i = k; i < final; i++) {
669 if (!accessor->HasElement(object, i, elms)) {
675 kind = GetPackedElementsKind(kind);
676 } else if (!receiver->IsJSArray()) {
677 AllowHeapAllocation allow_allocation;
678 return CallJsBuiltin(isolate, "ArraySlice", args);
682 Handle<JSArray> result_array =
683 isolate->factory()->NewJSArray(kind, result_len, result_len);
685 DisallowHeapAllocation no_gc;
686 if (result_len == 0) return *result_array;
688 ElementsAccessor* accessor = object->GetElementsAccessor();
689 accessor->CopyElements(
690 elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
691 return *result_array;
// C++ fast path for Array.prototype.splice: removes |actual_delete_count|
// elements at |actual_start|, inserts the remaining arguments there, and
// returns a new array holding the deleted elements. Falls back to the JS
// "ArraySplice" builtin whenever the fast path does not apply.
695 BUILTIN(ArraySplice) {
696 HandleScope scope(isolate);
697 Heap* heap = isolate->heap();
698 Handle<Object> receiver = args.receiver();
699 MaybeHandle<FixedArrayBase> maybe_elms_obj =
700 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
701 Handle<FixedArrayBase> elms_obj;
702 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
703 return CallJsBuiltin(isolate, "ArraySplice", args);
705 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
706 DCHECK(!array->map()->is_observed());
708 int len = Smi::cast(array->length())->value();
710 int n_arguments = args.length() - 1;
// Convert the start argument; only smis and in-range heap numbers stay on
// the fast path.
712 int relative_start = 0;
713 if (n_arguments > 0) {
714 DisallowHeapAllocation no_gc;
715 Object* arg1 = args[1];
717 relative_start = Smi::cast(arg1)->value();
718 } else if (arg1->IsHeapNumber()) {
719 double start = HeapNumber::cast(arg1)->value();
720 if (start < kMinInt || start > kMaxInt) {
721 AllowHeapAllocation allow_allocation;
722 return CallJsBuiltin(isolate, "ArraySplice", args);
724 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
725 } else if (!arg1->IsUndefined()) {
726 AllowHeapAllocation allow_allocation;
727 return CallJsBuiltin(isolate, "ArraySplice", args);
730 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
731 : Min(relative_start, len);
733 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
734 // given as a request to delete all the elements from the start.
735 // And it differs from the case of undefined delete count.
736 // This does not follow ECMA-262, but we do the same for
738 int actual_delete_count;
739 if (n_arguments == 1) {
740 DCHECK(len - actual_start >= 0);
741 actual_delete_count = len - actual_start;
743 int value = 0; // ToInteger(undefined) == 0
744 if (n_arguments > 1) {
745 DisallowHeapAllocation no_gc;
746 Object* arg2 = args[2];
748 value = Smi::cast(arg2)->value();
750 AllowHeapAllocation allow_allocation;
751 return CallJsBuiltin(isolate, "ArraySplice", args);
754 actual_delete_count = Min(Max(value, 0), len - actual_start);
757 ElementsKind elements_kind = array->GetElementsKind();
759 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
760 int new_length = len - actual_delete_count + item_count;
762 // For double mode we do not support changing the length.
763 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
764 return CallJsBuiltin(isolate, "ArraySplice", args);
767 if (new_length != len && JSArray::HasReadOnlyLength(array)) {
768 AllowHeapAllocation allow_allocation;
769 return CallJsBuiltin(isolate, "ArraySplice", args);
// Special case: everything removed -- hand the old store to the result
// array and reset the receiver to empty.
772 if (new_length == 0) {
773 Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
774 elms_obj, elements_kind, actual_delete_count);
775 array->set_elements(heap->empty_fixed_array());
776 array->set_length(Smi::FromInt(0));
780 Handle<JSArray> result_array =
781 isolate->factory()->NewJSArray(elements_kind,
783 actual_delete_count);
785 if (actual_delete_count > 0) {
786 DisallowHeapAllocation no_gc;
787 ElementsAccessor* accessor = array->GetElementsAccessor();
788 accessor->CopyElements(
789 elms_obj, actual_start, elements_kind,
790 handle(result_array->elements(), isolate), 0, actual_delete_count);
793 bool elms_changed = false;
// Case 1: array shrinks -- close the gap by moving the smaller side.
794 if (item_count < actual_delete_count) {
796 const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
797 ((actual_start + item_count) <
798 (len - actual_delete_count - actual_start));
800 const int delta = actual_delete_count - item_count;
802 if (elms_obj->IsFixedDoubleArray()) {
803 Handle<FixedDoubleArray> elms =
804 Handle<FixedDoubleArray>::cast(elms_obj);
805 MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
807 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
808 DisallowHeapAllocation no_gc;
809 heap->MoveElements(*elms, delta, 0, actual_start);
812 if (heap->CanMoveObjectStart(*elms_obj)) {
813 // On the fast path we move the start of the object in memory.
814 elms_obj = handle(heap->LeftTrimFixedArray(*elms_obj, delta));
816 // This is the slow path. We are going to move the elements to the left
817 // by copying them. For trimmed values we store the hole.
818 if (elms_obj->IsFixedDoubleArray()) {
819 Handle<FixedDoubleArray> elms =
820 Handle<FixedDoubleArray>::cast(elms_obj);
821 MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
822 elms->FillWithHoles(len - delta, len);
824 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
825 DisallowHeapAllocation no_gc;
826 heap->MoveElements(*elms, 0, delta, len - delta);
827 elms->FillWithHoles(len - delta, len);
832 if (elms_obj->IsFixedDoubleArray()) {
833 Handle<FixedDoubleArray> elms =
834 Handle<FixedDoubleArray>::cast(elms_obj);
835 MoveDoubleElements(*elms, actual_start + item_count,
836 *elms, actual_start + actual_delete_count,
837 (len - actual_delete_count - actual_start));
838 elms->FillWithHoles(new_length, len);
840 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
841 DisallowHeapAllocation no_gc;
842 heap->MoveElements(*elms, actual_start + item_count,
843 actual_start + actual_delete_count,
844 (len - actual_delete_count - actual_start));
845 elms->FillWithHoles(new_length, len);
// Case 2: array grows -- may need a larger backing store.
848 } else if (item_count > actual_delete_count) {
849 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
850 // Currently fixed arrays cannot grow too big, so
851 // we should never hit this case.
852 DCHECK((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
854 // Check if array need to grow.
855 if (new_length > elms->length()) {
856 // New backing storage is needed.
857 int capacity = new_length + (new_length >> 1) + 16;
858 Handle<FixedArray> new_elms =
859 isolate->factory()->NewUninitializedFixedArray(capacity);
861 DisallowHeapAllocation no_gc;
863 ElementsKind kind = array->GetElementsKind();
864 ElementsAccessor* accessor = array->GetElementsAccessor();
865 if (actual_start > 0) {
866 // Copy the part before actual_start as is.
867 accessor->CopyElements(
868 elms, 0, kind, new_elms, 0, actual_start);
870 accessor->CopyElements(
871 elms, actual_start + actual_delete_count, kind,
872 new_elms, actual_start + item_count,
873 ElementsAccessor::kCopyToEndAndInitializeToHole);
878 DisallowHeapAllocation no_gc;
879 heap->MoveElements(*elms, actual_start + item_count,
880 actual_start + actual_delete_count,
881 (len - actual_delete_count - actual_start));
// Write the inserted items into the gap.
885 if (IsFastDoubleElementsKind(elements_kind)) {
886 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
887 for (int k = actual_start; k < actual_start + item_count; k++) {
888 Object* arg = args[3 + k - actual_start];
890 elms->set(k, Smi::cast(arg)->value());
892 elms->set(k, HeapNumber::cast(arg)->value());
896 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
897 DisallowHeapAllocation no_gc;
898 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
899 for (int k = actual_start; k < actual_start + item_count; k++) {
900 elms->set(k, args[3 + k - actual_start], mode);
905 array->set_elements(*elms_obj);
908 array->set_length(Smi::FromInt(new_length));
910 return *result_array;
// C++ fast path for Array.prototype.concat: only applies when every
// argument is a fast-elements JSArray whose prototype is the unmodified
// Array.prototype; otherwise delegates to the JS "ArrayConcatJS" builtin.
914 BUILTIN(ArrayConcat) {
915 HandleScope scope(isolate);
917 int n_arguments = args.length();
919 ElementsKind elements_kind = GetInitialFastElementsKind();
920 bool has_double = false;
922 DisallowHeapAllocation no_gc;
923 Heap* heap = isolate->heap();
924 Context* native_context = isolate->context()->native_context();
925 Object* array_proto = native_context->array_function()->prototype();
926 PrototypeIterator iter(isolate, array_proto,
927 PrototypeIterator::START_AT_RECEIVER);
928 if (!ArrayPrototypeHasNoElements(heap, &iter)) {
929 AllowHeapAllocation allow_allocation;
930 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
933 // Iterate through all the arguments performing checks
934 // and calculating total length.
935 bool is_holey = false;
936 for (int i = 0; i < n_arguments; i++) {
937 Object* arg = args[i];
938 PrototypeIterator iter(isolate, arg);
939 if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements() ||
940 iter.GetCurrent() != array_proto) {
941 AllowHeapAllocation allow_allocation;
942 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
944 int len = Smi::cast(JSArray::cast(arg)->length())->value();
946 // We shouldn't overflow when adding another len.
947 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
948 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
951 DCHECK(result_len >= 0);
953 if (result_len > FixedDoubleArray::kMaxLength) {
954 AllowHeapAllocation allow_allocation;
955 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
// Track the most general elements kind needed for the result.
958 ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
959 has_double = has_double || IsFastDoubleElementsKind(arg_kind);
960 is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
961 if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
962 elements_kind = arg_kind;
965 if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
968 // If a double array is concatted into a fast elements array, the fast
969 // elements array needs to be initialized to contain proper holes, since
970 // boxing doubles may cause incremental marking.
971 ArrayStorageAllocationMode mode =
972 has_double && IsFastObjectElementsKind(elements_kind)
973 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
974 Handle<JSArray> result_array =
975 isolate->factory()->NewJSArray(elements_kind,
979 if (result_len == 0) return *result_array;
// Append each argument's elements into the result's backing store.
982 Handle<FixedArrayBase> storage(result_array->elements(), isolate);
983 ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
984 for (int i = 0; i < n_arguments; i++) {
985 // It is crucial to keep |array| in a raw pointer form to avoid performance
987 JSArray* array = JSArray::cast(args[i]);
988 int len = Smi::cast(array->length())->value();
990 ElementsKind from_kind = array->GetElementsKind();
991 accessor->CopyElements(array, 0, from_kind, storage, j, len);
996 DCHECK(j == result_len);
998 return *result_array;
1002 // -----------------------------------------------------------------------------
1003 // Generator and strict mode poison pills
// Poison-pill builtin: unconditionally throws a TypeError
// ("strict_poison_pill") when invoked.
1006 BUILTIN(StrictModePoisonPill) {
1007 HandleScope scope(isolate);
1008 THROW_NEW_ERROR_RETURN_FAILURE(
1010 NewTypeError("strict_poison_pill", HandleVector<Object>(NULL, 0)));
// Poison-pill builtin: unconditionally throws a TypeError
// ("generator_poison_pill") when invoked.
1014 BUILTIN(GeneratorPoisonPill) {
1015 HandleScope scope(isolate);
1016 THROW_NEW_ERROR_RETURN_FAILURE(
1018 NewTypeError("generator_poison_pill", HandleVector<Object>(NULL, 0)));
1022 // -----------------------------------------------------------------------------
// Shared implementation for invoking an embedder (API) function template.
// |is_construct| selects constructor-call semantics: for constructor
// calls, a non-JSObject callback result is replaced by the receiver.
// Returns an empty MaybeHandle when an exception is pending.
1026 template <bool is_construct>
1027 MUST_USE_RESULT static MaybeHandle<Object> HandleApiCallHelper(
1028 Isolate* isolate, BuiltinArguments<NEEDS_CALLED_FUNCTION>& args) {
1029 HandleScope scope(isolate);
1030 Handle<JSFunction> function = args.called_function();
1031 // TODO(ishell): turn this back to a DCHECK.
1032 CHECK(function->shared()->IsApiFunction());
1034 Handle<FunctionTemplateInfo> fun_data(
1035 function->shared()->get_api_func_data(), isolate);
1037 ASSIGN_RETURN_ON_EXCEPTION(
1039 ApiNatives::ConfigureInstance(isolate, fun_data,
1040 Handle<JSObject>::cast(args.receiver())),
// An undefined receiver is replaced by the global proxy.
1044 DCHECK(!args[0]->IsNull());
1045 if (args[0]->IsUndefined()) args[0] = function->global_proxy();
// For plain calls, run the embedder access check on the receiver unless
// the template accepts any receiver.
1047 if (!is_construct && !fun_data->accept_any_receiver()) {
1048 Handle<Object> receiver(&args[0]);
1049 if (receiver->IsJSObject() && receiver->IsAccessCheckNeeded()) {
1050 Handle<JSObject> js_receiver = Handle<JSObject>::cast(receiver);
1051 if (!isolate->MayAccess(js_receiver)) {
1052 isolate->ReportFailedAccessCheck(js_receiver);
1053 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
1058 Object* raw_holder = fun_data->GetCompatibleReceiver(isolate, args[0]);
1060 if (raw_holder->IsNull()) {
1061 // This function cannot be called with the given receiver. Abort!
1063 isolate, NewTypeError("illegal_invocation", HandleVector(&function, 1)),
// Invoke the embedder-supplied v8::FunctionCallback, if one is set.
1067 Object* raw_call_data = fun_data->call_code();
1068 if (!raw_call_data->IsUndefined()) {
1069 // TODO(ishell): remove this debugging code.
1070 CHECK(raw_call_data->IsCallHandlerInfo());
1071 CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1072 Object* callback_obj = call_data->callback();
1073 v8::FunctionCallback callback =
1074 v8::ToCData<v8::FunctionCallback>(callback_obj);
1075 Object* data_obj = call_data->data();
1077 LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1078 DCHECK(raw_holder->IsJSObject());
1080 FunctionCallbackArguments custom(isolate,
1088 v8::Handle<v8::Value> value = custom.Call(callback);
1089 Handle<Object> result;
// An empty callback result means undefined.
1090 if (value.IsEmpty()) {
1091 result = isolate->factory()->undefined_value();
1093 result = v8::Utils::OpenHandle(*value);
1094 result->VerifyApiCallResultType();
1097 RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, Object);
1098 if (!is_construct || result->IsJSObject()) {
1099 return scope.CloseAndEscape(result);
// Constructor call whose callback did not return an object: keep receiver.
1103 return scope.CloseAndEscape(args.receiver());
// Builtin entry for a normal (non-construct) call of an API function;
// delegates to HandleApiCallHelper<false> and converts an exceptional
// MaybeHandle into a failure sentinel.
BUILTIN(HandleApiCall) {
  HandleScope scope(isolate);
  DCHECK(!CalledAsConstructor(isolate));
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
                                     HandleApiCallHelper<false>(isolate, args));
  return *result;
}
// Builtin entry for a 'new' (construct) call of an API function; delegates
// to HandleApiCallHelper<true>.
BUILTIN(HandleApiCallConstruct) {
  HandleScope scope(isolate);
  DCHECK(CalledAsConstructor(isolate));
  Handle<Object> result;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, result,
                                     HandleApiCallHelper<true>(isolate, args));
  return *result;
}
// BuiltinArguments variant whose argument slots live on the C++ stack (see
// Builtins::InvokeApiFunction) rather than the JS stack.  Deriving from
// Relocatable registers the slots with the GC so they are visited (and
// pointers updated) across allocations.
class RelocatableArguments : public BuiltinArguments<NEEDS_CALLED_FUNCTION>,
                             public Relocatable {
 public:
  RelocatableArguments(Isolate* isolate, int length, Object** arguments)
      : BuiltinArguments<NEEDS_CALLED_FUNCTION>(length, arguments),
        Relocatable(isolate) {}

  // GC hook: visit every argument slot between the lowest and highest
  // addresses tracked by Arguments.
  virtual inline void IterateInstance(ObjectVisitor* v) {
    if (length() == 0) return;
    v->VisitPointers(lowest_address(), highest_address() + 1);
  }

 private:
  DISALLOW_COPY_AND_ASSIGN(RelocatableArguments);
};
// Invokes an API function directly from C++ by synthesizing the builtin
// argument layout in a local buffer: function at argv[0], the |argc|
// arguments reversed, and the receiver at argv[argc + 1].  Buffers larger
// than kBufferSize are heap-allocated and released before returning.
MaybeHandle<Object> Builtins::InvokeApiFunction(Handle<JSFunction> function,
                                                Handle<Object> receiver,
                                                int argc,
                                                Handle<Object> args[]) {
  // Construct BuiltinArguments object: function, arguments reversed, receiver.
  const int kBufferSize = 32;
  Object* small_argv[kBufferSize];
  Object** argv;
  if (argc + 2 <= kBufferSize) {
    argv = small_argv;
  } else {
    argv = new Object* [argc + 2];
  }
  argv[argc + 1] = *receiver;
  for (int i = 0; i < argc; ++i) {
    argv[argc - i] = *args[i];
  }
  argv[0] = *function;
  MaybeHandle<Object> result;
  {
    // RelocatableArguments keeps the raw slots GC-safe while the callback
    // (which may allocate) runs; scope ends before the buffer is freed.
    auto isolate = function->GetIsolate();
    RelocatableArguments arguments(isolate, argc + 2, &argv[argc + 1]);
    result = HandleApiCallHelper<false>(isolate, arguments);
  }
  if (argv != small_argv) {
    delete[] argv;
  }
  return result;
}
// Helper function to handle calls to non-function objects created through the
// API. The object can be called as either a constructor (using new) or just as
// a function (without new).  The object's instance-call handler (a
// v8::FunctionCallback installed via the object's template) does the work.
MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
    Isolate* isolate,
    bool is_construct_call,
    BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
  // Non-functions are never called as constructors. Even if this is an object
  // called as a constructor the delegate call is not a construct call.
  DCHECK(!CalledAsConstructor(isolate));
  Heap* heap = isolate->heap();

  Handle<Object> receiver = args.receiver();

  // Get the object called.
  JSObject* obj = JSObject::cast(*receiver);

  // Get the invocation callback from the function descriptor that was
  // used to create the called object.
  DCHECK(obj->map()->has_instance_call_handler());
  JSFunction* constructor = JSFunction::cast(obj->map()->GetConstructor());
  // TODO(ishell): turn this back to a DCHECK.
  CHECK(constructor->shared()->IsApiFunction());
  Object* handler =
      constructor->shared()->get_api_func_data()->instance_call_handler();
  DCHECK(!handler->IsUndefined());
  // TODO(ishell): remove this debugging code.
  CHECK(handler->IsCallHandlerInfo());
  CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
  Object* callback_obj = call_data->callback();
  v8::FunctionCallback callback =
      v8::ToCData<v8::FunctionCallback>(callback_obj);

  // Get the data for the call and perform the callback.
  Object* result;
  {
    // Inner scope so all handles die before the raw |result| is returned.
    HandleScope scope(isolate);
    LOG(isolate, ApiObjectAccess("call non-function", obj));

    FunctionCallbackArguments custom(isolate,
                                     call_data->data(),
                                     constructor,
                                     obj,
                                     &args[0] - 1,
                                     args.length() - 1,
                                     is_construct_call);
    v8::Handle<v8::Value> value = custom.Call(callback);
    if (value.IsEmpty()) {
      result = heap->undefined_value();
    } else {
      // Unwrap the v8::Value back into a raw heap Object*.
      result = *reinterpret_cast<Object**>(*value);
      result->VerifyApiCallResultType();
    }
  }
  // Check for exceptions and return result.
  RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
  return result;
}
// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a normal function call.
BUILTIN(HandleApiCallAsFunction) {
  return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
}
// Handle calls to non-function objects created through the API. This delegate
// function is used when the call is a construct call.
BUILTIN(HandleApiCallAsConstructor) {
  return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
}
// The Generate_* functions below are code generators registered in the
// builtin table (see BUILTIN_LIST_A / BUILTIN_LIST_H): each one emits, via
// the MacroAssembler, the machine code for one load IC stub by delegating
// to the corresponding IC generator.

// Load IC miss handler: falls back to the runtime when no cached handler fits.
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
  LoadIC::GenerateMiss(masm);
}


// Load from a dictionary-mode (normalized) object.
static void Generate_LoadIC_Normal(MacroAssembler* masm) {
  LoadIC::GenerateNormal(masm);
}


// Stub used when deoptimizing through a load-via-getter call site.
static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
  NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(masm);
}


// Slow path: perform the property load through the runtime.
static void Generate_LoadIC_Slow(MacroAssembler* masm) {
  LoadIC::GenerateRuntimeGetProperty(masm);
}


// Initial (uninitialized) state of the keyed load IC.
static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
  KeyedLoadIC::GenerateInitialize(masm);
}


// Slow path: keyed load through the runtime.
static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
  KeyedLoadIC::GenerateRuntimeGetProperty(masm);
}


// Keyed load IC miss handler.
static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMiss(masm);
}


// Megamorphic keyed load: generic stub used after too many map transitions.
static void Generate_KeyedLoadIC_Megamorphic(MacroAssembler* masm) {
  KeyedLoadIC::GenerateMegamorphic(masm);
}


// Pre-monomorphic keyed load: one more miss before specializing.
static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedLoadIC::GeneratePreMonomorphic(masm);
}
// Code generators for the store IC builtins.  Same delegation pattern as the
// load IC stubs above; the *_Strict variants differ only in the language
// mode passed to the generator.

// Store IC miss handler.
static void Generate_StoreIC_Miss(MacroAssembler* masm) {
  StoreIC::GenerateMiss(masm);
}


// Store into a dictionary-mode (normalized) object.
static void Generate_StoreIC_Normal(MacroAssembler* masm) {
  StoreIC::GenerateNormal(masm);
}


// Slow path: named store through the runtime.
static void Generate_StoreIC_Slow(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateSlow(masm);
}


// Slow path: element store through the runtime.
static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
  ElementHandlerCompiler::GenerateStoreSlow(masm);
}


// Stub used when deoptimizing through a store-via-setter call site.
static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
  NamedStoreHandlerCompiler::GenerateStoreViaSetterForDeopt(masm);
}


// Generic keyed store, sloppy mode.
static void Generate_KeyedStoreIC_Megamorphic(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMegamorphic(masm, SLOPPY);
}


// Generic keyed store, strict mode.
static void Generate_KeyedStoreIC_Megamorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMegamorphic(masm, STRICT);
}


// Keyed store IC miss handler.
static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
  KeyedStoreIC::GenerateMiss(masm);
}


// Initial (uninitialized) keyed store IC state.  Note: the strict variant
// emits the same initialize stub; strictness only matters on later states.
static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GenerateInitialize(masm);
}


// Pre-monomorphic keyed store (sloppy / strict share the generator).
static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
  KeyedStoreIC::GeneratePreMonomorphic(masm);
}


// Keyed store into sloppy-mode arguments objects (mapped arguments).
static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
  KeyedStoreIC::GenerateSloppyArguments(masm);
}
// Code generators for the debugger builtins (BUILTIN_LIST_DEBUG_A): each
// emits a stub that traps into the debugger at a particular kind of call
// site, plus the LiveEdit frame-manipulation stubs.

static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallICStubDebugBreak(masm);
}


static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateLoadICDebugBreak(masm);
}


static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateStoreICDebugBreak(masm);
}


static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
}


static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
}


static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCompareNilICDebugBreak(masm);
}


// Break on function return.
static void Generate_Return_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateReturnDebugBreak(masm);
}


static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
}


static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
}


// Variant for construct stubs that record call-site feedback.
static void Generate_CallConstructStub_Recording_DebugBreak(
    MacroAssembler* masm) {
  DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
}


static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
  DebugCodegen::GenerateSlotDebugBreak(masm);
}


// LiveEdit support: plain return from a patched frame.
static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GeneratePlainReturnLiveEdit(masm);
}


// LiveEdit support: drop frames so an edited function can be re-entered.
static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
  DebugCodegen::GenerateFrameDropperLiveEdit(masm);
}
// Zero the code and name tables; they are populated later by SetUp() (or by
// the deserializer via IterateBuiltins()).
Builtins::Builtins() : initialized_(false) {
  memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
  memset(names_, 0, sizeof(names_[0]) * builtin_count);
}
1435 Builtins::~Builtins() {
// Table of C entry points for the C++ builtins, generated by expanding
// BUILTIN_LIST_C once per builtin.
#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
Address const Builtins::c_functions_[cfunction_count] = {
  BUILTIN_LIST_C(DEF_ENUM_C)
};
#undef DEF_ENUM_C

// Parallel tables of names and argument counts for the JavaScript builtins,
// both generated from BUILTINS_LIST_JS so their indices stay in sync.
#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
const char* const Builtins::javascript_names_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_NAME)
};
int const Builtins::javascript_argc_[id_count] = {
  BUILTINS_LIST_JS(DEF_JS_ARGC)
};
#undef DEF_JS_NAME
#undef DEF_JS_ARGC
// One row of the builtin function table: everything needed to generate and
// register a single builtin's Code object.
struct BuiltinDesc {
  byte* generator;               // address of the Generate_* code generator
  byte* c_code;                  // address of the C++ body (C builtins only)
  const char* s_name;  // name is only used for generating log information.
  int name;                      // index into the builtin enum
  Code::Flags flags;             // flags for the generated Code object
  BuiltinExtraArguments extra_args;  // whether the called function is pushed
};
// Aggregate-initializer for the (POD) table below; required because the
// table is a static global that must need no dynamic initialization.
#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }

// Process-wide, lazily-initialized table of builtin descriptors.  CallOnce
// guarantees InitBuiltinFunctionTable runs exactly once even with multiple
// isolates on multiple threads.
class BuiltinFunctionTable {
 public:
  BuiltinDesc* functions() {
    base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
    return functions_;
  }

 private:
  base::OnceType once_;
  // One entry per builtin plus a NULL-terminator sentinel.
  BuiltinDesc functions_[Builtins::builtin_count + 1];

  friend class Builtins;
};

static BuiltinFunctionTable builtin_function_table =
    BUILTIN_FUNCTION_TABLE_INIT;
// Define array of pointers to generators and C builtin functions.
// We do this in a sort of roundabout way so that we can do the initialization
// within the lexical scope of Builtins:: and within a context where
// Code::Flags names a non-abstract type.
void Builtins::InitBuiltinFunctionTable() {
  BuiltinDesc* functions = builtin_function_table.functions_;
  // Write the terminating sentinel entry first.
  functions[builtin_count].generator = NULL;
  functions[builtin_count].c_code = NULL;
  functions[builtin_count].s_name = NULL;
  functions[builtin_count].name = builtin_count;
  functions[builtin_count].flags = static_cast<Code::Flags>(0);
  functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;

// Each DEF_FUNCTION_PTR_* macro fills in one row and advances |functions|;
// expanding the BUILTIN_LIST_* macros below therefore fills the table in
// enum order.

// C++ builtins: generated code is the adaptor trampoline into c_code.
#define DEF_FUNCTION_PTR_C(aname, aextra_args)                   \
    functions->generator = FUNCTION_ADDR(Generate_Adaptor);      \
    functions->c_code = FUNCTION_ADDR(Builtin_##aname);          \
    functions->s_name = #aname;                                  \
    functions->name = c_##aname;                                 \
    functions->flags = Code::ComputeFlags(Code::BUILTIN);        \
    functions->extra_args = aextra_args;                         \
    ++functions;

// Assembler builtins: code comes from a dedicated Generate_* function.
#define DEF_FUNCTION_PTR_A(aname, kind, state, extra)            \
    functions->generator = FUNCTION_ADDR(Generate_##aname);      \
    functions->c_code = NULL;                                    \
    functions->s_name = #aname;                                  \
    functions->name = k##aname;                                  \
    functions->flags = Code::ComputeFlags(Code::kind,            \
                                          state,                 \
                                          extra);                \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                  \
    ++functions;

// IC handler builtins: like _A but with handler-specific code flags.
#define DEF_FUNCTION_PTR_H(aname, kind)                          \
    functions->generator = FUNCTION_ADDR(Generate_##aname);      \
    functions->c_code = NULL;                                    \
    functions->s_name = #aname;                                  \
    functions->name = k##aname;                                  \
    functions->flags = Code::ComputeHandlerFlags(Code::kind);    \
    functions->extra_args = NO_EXTRA_ARGUMENTS;                  \
    ++functions;

  BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
  BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
  BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
  BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)

#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
#undef DEF_FUNCTION_PTR_H
}
// Generates (or, when deserializing, reserves slots for) the Code object of
// every builtin and records it in builtins_ / names_.
void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
  DCHECK(!initialized_);

  // Create a scope for the handles in the builtins.
  HandleScope scope(isolate);

  // First call triggers the once-only InitBuiltinFunctionTable().
  const BuiltinDesc* functions = builtin_function_table.functions();

  // For now we generate builtin adaptor code into a stack-allocated
  // buffer, before copying it into individual code objects. Be careful
  // with alignment, some platforms don't like unaligned code.
#ifdef DEBUG
  // We can generate a lot of debug code on Arm64.
  const size_t buffer_size = 32*KB;
#else
  const size_t buffer_size = 8*KB;
#endif
  // Union forces int alignment of the code buffer.
  union { int force_alignment; byte buffer[buffer_size]; } u;

  // Traverse the list of builtins and generate an adaptor in a
  // separate code object for each one.
  for (int i = 0; i < builtin_count; i++) {
    if (create_heap_objects) {
      MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
      // Generate the code/adaptor.
      typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
      Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
      // We pass all arguments to the generator, but it may not use all of
      // them. This works because the first arguments are on top of the
      // stack.
      DCHECK(!masm.has_frame());
      g(&masm, functions[i].name, functions[i].extra_args);
      // Move the code into the object heap.
      CodeDesc desc;
      masm.GetCode(&desc);
      Code::Flags flags = functions[i].flags;
      Handle<Code> code =
          isolate->factory()->NewCode(desc, flags, masm.CodeObject());
      // Log the event and add the code to the builtins array.
      PROFILE(isolate,
              CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
      builtins_[i] = *code;
      code->set_builtin_index(i);
#ifdef ENABLE_DISASSEMBLER
      if (FLAG_print_builtin_code) {
        CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
        OFStream os(trace_scope.file());
        os << "Builtin: " << functions[i].s_name << "\n";
        code->Disassemble(functions[i].s_name, os);
        os << "\n";
      }
#endif
    } else {
      // Deserializing. The values will be filled in during IterateBuiltins.
      builtins_[i] = NULL;
    }
    names_[i] = functions[i].s_name;
  }

  // Mark as initialized.
  initialized_ = true;
}
// Marks the builtins as uninitialized; the Code objects themselves are
// reclaimed with the heap.
void Builtins::TearDown() {
  initialized_ = false;
}
// GC/serializer hook: visit every builtin Code pointer so it can be traced
// or, on deserialization, filled in.
void Builtins::IterateBuiltins(ObjectVisitor* v) {
  v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
}
// Returns the name of the builtin whose code contains |pc|, or NULL when no
// builtin matches (or before initialization completes).
const char* Builtins::Lookup(byte* pc) {
  // may be called during initialization (disassembler!)
  if (initialized_) {
    for (int i = 0; i < builtin_count; i++) {
      Code* entry = Code::cast(builtins_[i]);
      if (entry->contains(pc)) {
        return names_[i];
      }
    }
  }
  return NULL;
}
// Emits a tail call into the Runtime_Interrupt handler (stack-guard
// interrupt servicing).
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
}
// Emits a tail call into the Runtime_StackGuard handler (stack-overflow /
// interrupt check).
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
  masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
}
// Per-builtin accessor methods (Builtins::ArrayCode(), etc.).  Each returns
// a Handle<Code> that aliases the builtins_ table slot itself, so the handle
// stays valid even if the code object is moved by GC.  The three macro
// variants differ only in the shape of the list macro that expands them.
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore)               \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra)   \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind)                 \
Handle<Code> Builtins::name() {                               \
  Code** code_address =                                       \
      reinterpret_cast<Code**>(builtin_address(k##name));     \
  return Handle<Code>(code_address);                          \
}
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A
#undef DEFINE_BUILTIN_ACCESSOR_H
1660 } } // namespace v8::internal