1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/arguments.h"
9 #include "src/base/once.h"
10 #include "src/bootstrapper.h"
11 #include "src/builtins.h"
12 #include "src/cpu-profiler.h"
13 #include "src/gdb-jit.h"
14 #include "src/heap/mark-compact.h"
15 #include "src/heap-profiler.h"
16 #include "src/ic/handler-compiler.h"
17 #include "src/ic/ic.h"
18 #include "src/prototype.h"
19 #include "src/vm-state-inl.h"
26 // Arguments object passed to C++ builtins.
// NOTE(review): this excerpt elides interior source lines (the embedded
// original line numbers at the left jump), so access specifiers, the
// length() declaration line, and closing braces of this class are not
// visible here. Code below is kept byte-identical.
27 template <BuiltinExtraArguments extra_args>
28 class BuiltinArguments : public Arguments {
// Forwards length and the raw argument pointer to the Arguments base.
30 BuiltinArguments(int length, Object** arguments)
31 : Arguments(length, arguments) { }
// Raw element access, bounds-checked in debug builds only (DCHECK).
33 Object*& operator[] (int index) {
34 DCHECK(index < length());
35 return Arguments::operator[](index);
// Typed handle access, bounds-checked in debug builds only.
38 template <class S> Handle<S> at(int index) {
39 DCHECK(index < length());
40 return Arguments::at<S>(index);
// The receiver is always argument 0.
43 Handle<Object> receiver() {
44 return Arguments::at<Object>(0);
// The called function rides along as the last (extra) slot; the
// STATIC_ASSERT restricts this accessor to NEEDS_CALLED_FUNCTION
// instantiations.
47 Handle<JSFunction> called_function() {
48 STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
49 return Arguments::at<JSFunction>(Arguments::length() - 1);
52 // Gets the total number of arguments including the receiver (but
53 // excluding extra arguments).
// (The enclosing `length()` declaration line is elided from this view.)
55 STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
56 return Arguments::length();
61 // Check we have at least the receiver.
62 DCHECK(Arguments::length() >= 1);
68 // Specialize BuiltinArguments for the called function extra argument.
// The called function occupies one trailing slot, so the visible argument
// count is the raw count minus one. (Closing brace elided in this excerpt.)
71 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
72 return Arguments::length() - 1;
// Debug-only sanity check for NEEDS_CALLED_FUNCTION argument frames.
// The JSFunction cast check and closing brace are elided in this excerpt.
77 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
78 // Check we have at least the receiver and the called function.
79 DCHECK(Arguments::length() >= 2);
80 // Make sure cast to JSFunction succeeds.
// Generates one `<Name>ArgumentsType` typedef per C++ builtin listed in
// BUILTIN_LIST_C, binding each builtin to its extra-argument spec.
86 #define DEF_ARG_TYPE(name, spec) \
87 typedef BuiltinArguments<spec> name##ArgumentsType;
88 BUILTIN_LIST_C(DEF_ARG_TYPE)
93 // ----------------------------------------------------------------------------
94 // Support macro for defining builtins in C++.
95 // ----------------------------------------------------------------------------
97 // A builtin function is defined by writing:
103 // In the body of the builtin function the arguments can be accessed
104 // through the BuiltinArguments object args.
// Debug-mode variant: wraps the raw (length, Object**) entry point and
// forwards to a MUST_USE_RESULT implementation function. Some macro
// continuation lines (e.g. args.Verify()) are elided in this excerpt.
108 #define BUILTIN(name) \
109 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
110 name##ArgumentsType args, Isolate* isolate); \
111 MUST_USE_RESULT static Object* Builtin_##name( \
112 int args_length, Object** args_object, Isolate* isolate) { \
113 name##ArgumentsType args(args_length, args_object); \
115 return Builtin_Impl_##name(args, isolate); \
117 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
118 name##ArgumentsType args, Isolate* isolate)
// Release-mode variant: same wrapper shape as the debug version but
// without argument verification or MUST_USE_RESULT annotations.
120 #else // For release mode.
122 #define BUILTIN(name) \
123 static Object* Builtin_impl##name( \
124 name##ArgumentsType args, Isolate* isolate); \
125 static Object* Builtin_##name( \
126 int args_length, Object** args_object, Isolate* isolate) { \
127 name##ArgumentsType args(args_length, args_object); \
128 return Builtin_impl##name(args, isolate); \
130 static Object* Builtin_impl##name( \
131 name##ArgumentsType args, Isolate* isolate)
// Debug helper: detect whether the current builtin call was made as a
// constructor by reading the CONSTRUCT marker from the caller's frame.
// The reference computation via a full StackFrameIterator cross-checks
// the fast fp-based computation (DCHECK_EQ below). The `return result;`
// line and closing brace are elided in this excerpt.
136 static inline bool CalledAsConstructor(Isolate* isolate) {
137 // Calculate the result using a full stack frame iterator and check
138 // that the state of the stack is as we assume it to be in the
140 StackFrameIterator it(isolate);
141 DCHECK(it.frame()->is_exit());
143 StackFrame* frame = it.frame();
144 bool reference_result = frame->is_construct();
145 Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
146 // Because we know fp points to an exit frame we can use the relevant
147 // part of ExitFrame::ComputeCallerState directly.
148 const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
149 Address caller_fp = Memory::Address_at(fp + kCallerOffset);
150 // This inlines the part of StackFrame::ComputeType that grabs the
151 // type of the current frame. Note that StackFrame::ComputeType
152 // has been specialized for each architecture so if any one of them
153 // changes this code has to be changed as well.
154 const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
155 const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
156 Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
157 bool result = (marker == kConstructMarker);
158 DCHECK_EQ(result, reference_result);
164 // ----------------------------------------------------------------------------
168 return isolate->heap()->undefined_value(); // Make compiler happy.
// %EmptyFunction: a builtin that does nothing and returns undefined.
// (Closing brace elided in this excerpt.)
172 BUILTIN(EmptyFunction) {
173 return isolate->heap()->undefined_value();
// Memmove-based bulk move of `len` doubles between (possibly overlapping)
// FixedDoubleArray backing stores. The byte-size argument of the MemMove
// call and the closing brace are elided in this excerpt.
177 static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
178 FixedDoubleArray* src, int src_index, int len) {
179 if (len == 0) return;
180 MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
// Returns true iff Array.prototype (and the Object prototype behind it)
// carry no elements, so fast-path element moves cannot observe prototype
// elements. Some interior lines (e.g. an early-return branch body) are
// elided in this excerpt.
185 static bool ArrayPrototypeHasNoElements(Heap* heap,
186 Context* native_context,
187 JSObject* array_proto) {
188 DisallowHeapAllocation no_gc;
189 // This method depends on non writability of Object and Array prototype
191 if (array_proto->elements() != heap->empty_fixed_array()) return false;
193 PrototypeIterator iter(heap->isolate(), array_proto);
194 if (iter.IsAtEnd()) {
// Walk one step: the object behind Array.prototype must be the initial
// Object.prototype, and it too must have no elements.
197 array_proto = JSObject::cast(iter.GetCurrent());
198 if (array_proto != native_context->initial_object_prototype()) return false;
199 if (array_proto->elements() != heap->empty_fixed_array()) return false;
// True only when Object.prototype terminates the chain.
201 return iter.IsAtEnd();
// Gatekeeper for in-place element moves on a JSArray: requires the
// --clever-optimizations flag, the receiver's direct prototype to be the
// initial Array.prototype, and that prototype chain to hold no elements.
// The `JSArray* receiver` parameter line is elided in this excerpt.
205 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
207 if (!FLAG_clever_optimizations) return false;
208 DisallowHeapAllocation no_gc;
209 Context* native_context = heap->isolate()->context()->native_context();
210 JSObject* array_proto =
211 JSObject::cast(native_context->array_function()->prototype());
212 PrototypeIterator iter(heap->isolate(), receiver);
213 return iter.GetCurrent() == array_proto &&
214 ArrayPrototypeHasNoElements(heap, native_context, array_proto);
218 // Returns empty handle if not applicable.
// Validates that `receiver` is a JSArray eligible for the C++ fast path
// (not observed, extensible, fast elements) and returns a writable
// backing store, transitioning the elements kind if the pending `args`
// (starting at `first_added_arg`) require a more general kind. Returns an
// empty MaybeHandle to signal "fall back to the JS builtin". Several
// parameter/branch lines are elided in this excerpt.
220 static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
222 Handle<Object> receiver,
224 int first_added_arg) {
225 if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
226 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
227 // If there may be elements accessors in the prototype chain, the fast path
228 // cannot be used if there arguments to add to the array.
229 Heap* heap = isolate->heap();
230 if (args != NULL && !IsJSArrayFastElementMovingAllowed(heap, *array)) {
231 return MaybeHandle<FixedArrayBase>();
233 if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
234 if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
235 Handle<FixedArrayBase> elms(array->elements(), isolate);
// Dispatch on the concrete backing-store map: plain, copy-on-write
// (must be made writable first), or double array.
236 Map* map = elms->map();
237 if (map == heap->fixed_array_map()) {
238 if (args == NULL || array->HasFastObjectElements()) return elms;
239 } else if (map == heap->fixed_cow_array_map()) {
240 elms = JSObject::EnsureWritableFastElements(array);
241 if (args == NULL || array->HasFastObjectElements()) return elms;
242 } else if (map == heap->fixed_double_array_map()) {
243 if (args == NULL) return elms;
// Unrecognized elements map: bail out to the JS implementation.
245 return MaybeHandle<FixedArrayBase>();
248 // Need to ensure that the arguments passed in args can be contained in
250 int args_length = args->length();
251 if (first_added_arg >= args_length) return handle(array->elements(), isolate);
253 ElementsKind origin_kind = array->map()->elements_kind();
254 DCHECK(!IsFastObjectElementsKind(origin_kind));
255 ElementsKind target_kind = origin_kind;
257 DisallowHeapAllocation no_gc;
// Scan the values about to be added; any heap number forces at least
// FAST_DOUBLE_ELEMENTS, any other heap object forces FAST_ELEMENTS.
// Note: Arguments grow downwards, hence the pointer arithmetic below.
258 int arg_count = args->length() - first_added_arg;
259 Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
260 for (int i = 0; i < arg_count; i++) {
261 Object* arg = arguments[i];
262 if (arg->IsHeapObject()) {
263 if (arg->IsHeapNumber()) {
264 target_kind = FAST_DOUBLE_ELEMENTS;
266 target_kind = FAST_ELEMENTS;
272 if (target_kind != origin_kind) {
273 JSObject::TransitionElementsKind(array, target_kind);
274 return handle(array->elements(), isolate);
// Slow-path fallback: looks up the named builtin on the native context's
// builtins object and invokes it via Execution::Call, forwarding all
// arguments except the receiver. The `name` parameter line and the tail
// of the Execution::Call expression are elided in this excerpt.
280 MUST_USE_RESULT static Object* CallJsBuiltin(
283 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
284 HandleScope handleScope(isolate);
286 Handle<Object> js_builtin = Object::GetProperty(
288 handle(isolate->native_context()->builtins(), isolate),
289 name).ToHandleChecked();
290 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
// Repackage args (minus the receiver at slot 0) into a handle vector.
291 int argc = args.length() - 1;
292 ScopedVector<Handle<Object> > argv(argc);
293 for (int i = 0; i < argc; ++i) {
294 argv[i] = args.at<Object>(i + 1);
296 Handle<Object> result;
297 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
299 Execution::Call(isolate,
// NOTE(review): this is the body of BUILTIN(ArrayPush); the BUILTIN header
// line itself is elided in this excerpt, as are several interior lines.
// Fast-path Array.prototype.push: appends the call arguments to a fast
// smi/object or double array, growing the backing store (1.5x + 16) when
// needed, and falls back to the JS "ArrayPush" builtin otherwise.
309 HandleScope scope(isolate);
310 Handle<Object> receiver = args.receiver();
311 MaybeHandle<FixedArrayBase> maybe_elms_obj =
312 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
313 Handle<FixedArrayBase> elms_obj;
314 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
315 return CallJsBuiltin(isolate, "ArrayPush", args);
318 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
319 int len = Smi::cast(array->length())->value();
320 int to_add = args.length() - 1;
// Pushing past a read-only length must go through the generic path so
// the proper error/behavior is produced.
321 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
322 return CallJsBuiltin(isolate, "ArrayPush", args);
324 DCHECK(!array->map()->is_observed());
326 ElementsKind kind = array->GetElementsKind();
// --- Smi/object elements branch ---
328 if (IsFastSmiOrObjectElementsKind(kind)) {
329 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
331 return Smi::FromInt(len);
333 // Currently fixed arrays cannot grow too big, so
334 // we should never hit this case.
335 DCHECK(to_add <= (Smi::kMaxValue - len));
337 int new_length = len + to_add;
339 if (new_length > elms->length()) {
340 // New backing storage is needed.
341 int capacity = new_length + (new_length >> 1) + 16;
342 Handle<FixedArray> new_elms =
343 isolate->factory()->NewUninitializedFixedArray(capacity);
345 ElementsAccessor* accessor = array->GetElementsAccessor();
346 accessor->CopyElements(
347 elms_obj, 0, kind, new_elms, 0,
348 ElementsAccessor::kCopyToEndAndInitializeToHole);
353 // Add the provided values.
354 DisallowHeapAllocation no_gc;
355 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
356 for (int index = 0; index < to_add; index++) {
357 elms->set(index + len, args[index + 1], mode);
360 if (*elms != array->elements()) {
361 array->set_elements(*elms);
365 array->set_length(Smi::FromInt(new_length));
366 return Smi::FromInt(new_length);
// --- Double elements branch (the guarding else is elided here) ---
368 int elms_len = elms_obj->length();
370 return Smi::FromInt(len);
372 // Currently fixed arrays cannot grow too big, so
373 // we should never hit this case.
374 DCHECK(to_add <= (Smi::kMaxValue - len));
376 int new_length = len + to_add;
378 Handle<FixedDoubleArray> new_elms;
380 if (new_length > elms_len) {
381 // New backing storage is needed.
382 int capacity = new_length + (new_length >> 1) + 16;
383 // Create new backing store; since capacity > 0, we can
384 // safely cast to FixedDoubleArray.
385 new_elms = Handle<FixedDoubleArray>::cast(
386 isolate->factory()->NewFixedDoubleArray(capacity));
388 ElementsAccessor* accessor = array->GetElementsAccessor();
389 accessor->CopyElements(
390 elms_obj, 0, kind, new_elms, 0,
391 ElementsAccessor::kCopyToEndAndInitializeToHole);
394 // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
395 // empty_fixed_array.
396 new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
399 // Add the provided values.
400 DisallowHeapAllocation no_gc;
402 for (index = 0; index < to_add; index++) {
403 Object* arg = args[index + 1];
404 new_elms->set(index + len, arg->Number());
407 if (*new_elms != array->elements()) {
408 array->set_elements(*new_elms);
412 array->set_length(Smi::FromInt(new_length));
413 return Smi::FromInt(new_length);
// NOTE(review): body of BUILTIN(ArrayPop); the BUILTIN header line is
// elided in this excerpt. Fast-path Array.prototype.pop: reads the last
// element, shrinks the length via the elements accessor, and falls back
// to the JS "ArrayPop" builtin for slow receivers or holes.
419 HandleScope scope(isolate);
420 Handle<Object> receiver = args.receiver();
421 MaybeHandle<FixedArrayBase> maybe_elms_obj =
422 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
423 Handle<FixedArrayBase> elms_obj;
424 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
425 return CallJsBuiltin(isolate, "ArrayPop", args);
428 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
429 DCHECK(!array->map()->is_observed());
431 int len = Smi::cast(array->length())->value();
432 if (len == 0) return isolate->heap()->undefined_value();
434 ElementsAccessor* accessor = array->GetElementsAccessor();
435 int new_length = len - 1;
436 Handle<Object> element =
437 accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
// A hole means the value may live on the prototype chain: generic path.
438 if (element->IsTheHole()) {
439 return CallJsBuiltin(isolate, "ArrayPop", args);
441 RETURN_FAILURE_ON_EXCEPTION(
443 accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
// Fast-path Array.prototype.shift: removes and returns the first element,
// either by left-trimming the backing store in place or by memmoving the
// remaining elements down one slot. Falls back to the JS "ArrayShift"
// builtin for slow receivers or holes. Interior lines (including the
// return of `first`) are elided in this excerpt.
448 BUILTIN(ArrayShift) {
449 HandleScope scope(isolate);
450 Heap* heap = isolate->heap();
451 Handle<Object> receiver = args.receiver();
452 MaybeHandle<FixedArrayBase> maybe_elms_obj =
453 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
454 Handle<FixedArrayBase> elms_obj;
455 if (!maybe_elms_obj.ToHandle(&elms_obj) ||
456 !IsJSArrayFastElementMovingAllowed(heap, JSArray::cast(*receiver))) {
457 return CallJsBuiltin(isolate, "ArrayShift", args);
459 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
460 DCHECK(!array->map()->is_observed());
462 int len = Smi::cast(array->length())->value();
463 if (len == 0) return heap->undefined_value();
466 ElementsAccessor* accessor = array->GetElementsAccessor();
467 Handle<Object> first =
468 accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
469 if (first->IsTheHole()) {
470 return CallJsBuiltin(isolate, "ArrayShift", args);
// Fast path: bump the object start pointer instead of copying.
473 if (heap->CanMoveObjectStart(*elms_obj)) {
474 array->set_elements(heap->LeftTrimFixedArray(*elms_obj, 1));
476 // Shift the elements.
477 if (elms_obj->IsFixedArray()) {
478 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
479 DisallowHeapAllocation no_gc;
480 heap->MoveElements(*elms, 0, 1, len - 1);
481 elms->set(len - 1, heap->the_hole_value());
483 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
484 MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
485 elms->set_the_hole(len - 1);
490 array->set_length(Smi::FromInt(len - 1));
// Fast-path Array.prototype.unshift: inserts the call arguments at the
// front of a fast smi/object array, either by allocating a larger store
// and copying with an offset, or by memmoving the existing elements up.
// Falls back to the JS "ArrayUnshift" builtin otherwise. Interior lines
// are elided in this excerpt.
496 BUILTIN(ArrayUnshift) {
497 HandleScope scope(isolate);
498 Heap* heap = isolate->heap();
499 Handle<Object> receiver = args.receiver();
500 MaybeHandle<FixedArrayBase> maybe_elms_obj =
501 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
502 Handle<FixedArrayBase> elms_obj;
503 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
504 return CallJsBuiltin(isolate, "ArrayUnshift", args);
506 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
507 DCHECK(!array->map()->is_observed());
// Only smi/object elements take this fast path (doubles bail out).
508 if (!array->HasFastSmiOrObjectElements()) {
509 return CallJsBuiltin(isolate, "ArrayUnshift", args);
511 int len = Smi::cast(array->length())->value();
512 int to_add = args.length() - 1;
513 int new_length = len + to_add;
514 // Currently fixed arrays cannot grow too big, so
515 // we should never hit this case.
516 DCHECK(to_add <= (Smi::kMaxValue - len));
518 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
519 return CallJsBuiltin(isolate, "ArrayUnshift", args);
522 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
524 if (new_length > elms->length()) {
525 // New backing storage is needed.
526 int capacity = new_length + (new_length >> 1) + 16;
527 Handle<FixedArray> new_elms =
528 isolate->factory()->NewUninitializedFixedArray(capacity);
// Copy the old elements shifted up by `to_add` into the new store.
530 ElementsKind kind = array->GetElementsKind();
531 ElementsAccessor* accessor = array->GetElementsAccessor();
532 accessor->CopyElements(
533 elms, 0, kind, new_elms, to_add,
534 ElementsAccessor::kCopyToEndAndInitializeToHole);
537 array->set_elements(*elms);
// In-place variant: shuffle existing elements up by `to_add`.
539 DisallowHeapAllocation no_gc;
540 heap->MoveElements(*elms, to_add, 0, len);
543 // Add the provided values.
544 DisallowHeapAllocation no_gc;
545 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
546 for (int i = 0; i < to_add; i++) {
547 elms->set(i, args[i + 1], mode);
551 array->set_length(Smi::FromInt(new_length));
552 return Smi::FromInt(new_length);
// Fast-path Array.prototype.slice for fast-elements JSArrays and sloppy
// arguments objects. Clamps start/end per ES3 15.4.4.10, then bulk-copies
// the [k, final) range into a freshly allocated result array. Any receiver
// shape, prototype, or numeric-range surprise bails out to the JS
// "ArraySlice" builtin. Interior lines (including the `int len` declaration
// and several Smi checks) are elided in this excerpt.
556 BUILTIN(ArraySlice) {
557 HandleScope scope(isolate);
558 Heap* heap = isolate->heap();
559 Handle<Object> receiver = args.receiver();
561 int relative_start = 0;
562 int relative_end = 0;
564 DisallowHeapAllocation no_gc;
565 if (receiver->IsJSArray()) {
566 JSArray* array = JSArray::cast(*receiver);
567 if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
568 AllowHeapAllocation allow_allocation;
569 return CallJsBuiltin(isolate, "ArraySlice", args);
572 if (!array->HasFastElements()) {
573 AllowHeapAllocation allow_allocation;
574 return CallJsBuiltin(isolate, "ArraySlice", args);
577 len = Smi::cast(array->length())->value();
579 // Array.slice(arguments, ...) is quite a common idiom (notably more
580 // than 50% of invocations in Web apps). Treat it in C++ as well.
582 isolate->context()->native_context()->sloppy_arguments_map();
584 bool is_arguments_object_with_fast_elements =
585 receiver->IsJSObject() &&
586 JSObject::cast(*receiver)->map() == arguments_map;
587 if (!is_arguments_object_with_fast_elements) {
588 AllowHeapAllocation allow_allocation;
589 return CallJsBuiltin(isolate, "ArraySlice", args);
591 JSObject* object = JSObject::cast(*receiver);
593 if (!object->HasFastElements()) {
594 AllowHeapAllocation allow_allocation;
595 return CallJsBuiltin(isolate, "ArraySlice", args);
// The arguments object's length lives in an in-object property, not in
// the elements store; it must be a Smi no larger than the store.
598 Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
599 if (!len_obj->IsSmi()) {
600 AllowHeapAllocation allow_allocation;
601 return CallJsBuiltin(isolate, "ArraySlice", args);
603 len = Smi::cast(len_obj)->value();
604 if (len > object->elements()->length()) {
605 AllowHeapAllocation allow_allocation;
606 return CallJsBuiltin(isolate, "ArraySlice", args);
611 int n_arguments = args.length() - 1;
613 // Note carefully choosen defaults---if argument is missing,
614 // it's undefined which gets converted to 0 for relative_start
615 // and to len for relative_end.
// Parse the optional start argument: Smi fast case, HeapNumber with
// int-range check, or undefined; anything else bails out.
618 if (n_arguments > 0) {
619 Object* arg1 = args[1];
621 relative_start = Smi::cast(arg1)->value();
622 } else if (arg1->IsHeapNumber()) {
623 double start = HeapNumber::cast(arg1)->value();
624 if (start < kMinInt || start > kMaxInt) {
625 AllowHeapAllocation allow_allocation;
626 return CallJsBuiltin(isolate, "ArraySlice", args);
628 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
629 } else if (!arg1->IsUndefined()) {
630 AllowHeapAllocation allow_allocation;
631 return CallJsBuiltin(isolate, "ArraySlice", args);
// Parse the optional end argument with the same three-way scheme.
633 if (n_arguments > 1) {
634 Object* arg2 = args[2];
636 relative_end = Smi::cast(arg2)->value();
637 } else if (arg2->IsHeapNumber()) {
638 double end = HeapNumber::cast(arg2)->value();
639 if (end < kMinInt || end > kMaxInt) {
640 AllowHeapAllocation allow_allocation;
641 return CallJsBuiltin(isolate, "ArraySlice", args);
643 relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
644 } else if (!arg2->IsUndefined()) {
645 AllowHeapAllocation allow_allocation;
646 return CallJsBuiltin(isolate, "ArraySlice", args);
652 // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 6.
653 int k = (relative_start < 0) ? Max(len + relative_start, 0)
654 : Min(relative_start, len);
656 // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 8.
657 int final = (relative_end < 0) ? Max(len + relative_end, 0)
658 : Min(relative_end, len);
660 // Calculate the length of result array.
661 int result_len = Max(final - k, 0);
663 Handle<JSObject> object = Handle<JSObject>::cast(receiver);
664 Handle<FixedArrayBase> elms(object->elements(), isolate);
// Holey source: verify every slot in range is present so the result can
// be allocated with the corresponding packed kind.
666 ElementsKind kind = object->GetElementsKind();
667 if (IsHoleyElementsKind(kind)) {
668 DisallowHeapAllocation no_gc;
670 ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
671 for (int i = k; i < final; i++) {
672 if (!accessor->HasElement(object, object, i, elms)) {
678 kind = GetPackedElementsKind(kind);
679 } else if (!receiver->IsJSArray()) {
680 AllowHeapAllocation allow_allocation;
681 return CallJsBuiltin(isolate, "ArraySlice", args);
685 Handle<JSArray> result_array =
686 isolate->factory()->NewJSArray(kind, result_len, result_len);
688 DisallowHeapAllocation no_gc;
689 if (result_len == 0) return *result_array;
691 ElementsAccessor* accessor = object->GetElementsAccessor();
692 accessor->CopyElements(
693 elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
694 return *result_array;
// Fast-path Array.prototype.splice: extracts [actual_start,
// actual_start + actual_delete_count) into a new result array, then
// closes or opens the gap in the receiver's backing store (by left-trim,
// memmove, or reallocation) and writes the inserted items. Falls back to
// the JS "ArraySplice" builtin whenever the fast preconditions fail.
// Many interior lines (else branches, closing braces) are elided in this
// excerpt.
698 BUILTIN(ArraySplice) {
699 HandleScope scope(isolate);
700 Heap* heap = isolate->heap();
701 Handle<Object> receiver = args.receiver();
702 MaybeHandle<FixedArrayBase> maybe_elms_obj =
703 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
704 Handle<FixedArrayBase> elms_obj;
705 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
706 return CallJsBuiltin(isolate, "ArraySplice", args);
708 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
709 DCHECK(!array->map()->is_observed());
711 int len = Smi::cast(array->length())->value();
713 int n_arguments = args.length() - 1;
// Parse the start index: Smi, in-range HeapNumber, or undefined.
715 int relative_start = 0;
716 if (n_arguments > 0) {
717 DisallowHeapAllocation no_gc;
718 Object* arg1 = args[1];
720 relative_start = Smi::cast(arg1)->value();
721 } else if (arg1->IsHeapNumber()) {
722 double start = HeapNumber::cast(arg1)->value();
723 if (start < kMinInt || start > kMaxInt) {
724 AllowHeapAllocation allow_allocation;
725 return CallJsBuiltin(isolate, "ArraySplice", args);
727 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
728 } else if (!arg1->IsUndefined()) {
729 AllowHeapAllocation allow_allocation;
730 return CallJsBuiltin(isolate, "ArraySplice", args);
733 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
734 : Min(relative_start, len);
736 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
737 // given as a request to delete all the elements from the start.
738 // And it differs from the case of undefined delete count.
739 // This does not follow ECMA-262, but we do the same for
741 int actual_delete_count;
742 if (n_arguments == 1) {
743 DCHECK(len - actual_start >= 0);
744 actual_delete_count = len - actual_start;
746 int value = 0; // ToInteger(undefined) == 0
747 if (n_arguments > 1) {
748 DisallowHeapAllocation no_gc;
749 Object* arg2 = args[2];
751 value = Smi::cast(arg2)->value();
753 AllowHeapAllocation allow_allocation;
754 return CallJsBuiltin(isolate, "ArraySplice", args);
// Clamp the delete count to [0, len - actual_start].
757 actual_delete_count = Min(Max(value, 0), len - actual_start);
760 ElementsKind elements_kind = array->GetElementsKind();
762 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
763 int new_length = len - actual_delete_count + item_count;
765 // For double mode we do not support changing the length.
766 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
767 return CallJsBuiltin(isolate, "ArraySplice", args);
// Special case: everything is removed — hand the whole backing store to
// the result and reset the receiver to the empty array.
770 if (new_length == 0) {
771 Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
772 elms_obj, elements_kind, actual_delete_count);
773 array->set_elements(heap->empty_fixed_array());
774 array->set_length(Smi::FromInt(0));
778 Handle<JSArray> result_array =
779 isolate->factory()->NewJSArray(elements_kind,
781 actual_delete_count);
783 if (actual_delete_count > 0) {
784 DisallowHeapAllocation no_gc;
785 ElementsAccessor* accessor = array->GetElementsAccessor();
786 accessor->CopyElements(
787 elms_obj, actual_start, elements_kind,
788 handle(result_array->elements(), isolate), 0, actual_delete_count);
791 bool elms_changed = false;
// --- Gap closes (net shrink) ---
792 if (item_count < actual_delete_count) {
// Prefer left-trimming when the prefix is the smaller part to move and
// the store is not in large-object space.
794 const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
795 ((actual_start + item_count) <
796 (len - actual_delete_count - actual_start));
798 const int delta = actual_delete_count - item_count;
800 if (elms_obj->IsFixedDoubleArray()) {
801 Handle<FixedDoubleArray> elms =
802 Handle<FixedDoubleArray>::cast(elms_obj);
803 MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
805 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
806 DisallowHeapAllocation no_gc;
807 heap->MoveElements(*elms, delta, 0, actual_start);
810 if (heap->CanMoveObjectStart(*elms_obj)) {
811 // On the fast path we move the start of the object in memory.
812 elms_obj = handle(heap->LeftTrimFixedArray(*elms_obj, delta));
814 // This is the slow path. We are going to move the elements to the left
815 // by copying them. For trimmed values we store the hole.
816 if (elms_obj->IsFixedDoubleArray()) {
817 Handle<FixedDoubleArray> elms =
818 Handle<FixedDoubleArray>::cast(elms_obj);
819 MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
820 elms->FillWithHoles(len - delta, len);
822 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
823 DisallowHeapAllocation no_gc;
824 heap->MoveElements(*elms, 0, delta, len - delta);
825 elms->FillWithHoles(len - delta, len);
// Non-trim variant: move the suffix left over the deleted range.
830 if (elms_obj->IsFixedDoubleArray()) {
831 Handle<FixedDoubleArray> elms =
832 Handle<FixedDoubleArray>::cast(elms_obj);
833 MoveDoubleElements(*elms, actual_start + item_count,
834 *elms, actual_start + actual_delete_count,
835 (len - actual_delete_count - actual_start));
836 elms->FillWithHoles(new_length, len);
838 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
839 DisallowHeapAllocation no_gc;
840 heap->MoveElements(*elms, actual_start + item_count,
841 actual_start + actual_delete_count,
842 (len - actual_delete_count - actual_start));
843 elms->FillWithHoles(new_length, len);
// --- Gap opens (net growth, smi/object stores only per the check above) ---
846 } else if (item_count > actual_delete_count) {
847 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
848 // Currently fixed arrays cannot grow too big, so
849 // we should never hit this case.
850 DCHECK((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
852 // Check if array need to grow.
853 if (new_length > elms->length()) {
854 // New backing storage is needed.
855 int capacity = new_length + (new_length >> 1) + 16;
856 Handle<FixedArray> new_elms =
857 isolate->factory()->NewUninitializedFixedArray(capacity);
859 DisallowHeapAllocation no_gc;
861 ElementsKind kind = array->GetElementsKind();
862 ElementsAccessor* accessor = array->GetElementsAccessor();
863 if (actual_start > 0) {
864 // Copy the part before actual_start as is.
865 accessor->CopyElements(
866 elms, 0, kind, new_elms, 0, actual_start);
868 accessor->CopyElements(
869 elms, actual_start + actual_delete_count, kind,
870 new_elms, actual_start + item_count,
871 ElementsAccessor::kCopyToEndAndInitializeToHole);
// In-place variant: shift the suffix right to open the gap.
876 DisallowHeapAllocation no_gc;
877 heap->MoveElements(*elms, actual_start + item_count,
878 actual_start + actual_delete_count,
879 (len - actual_delete_count - actual_start));
// Write the inserted items into the gap (double vs tagged stores).
883 if (IsFastDoubleElementsKind(elements_kind)) {
884 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
885 for (int k = actual_start; k < actual_start + item_count; k++) {
886 Object* arg = args[3 + k - actual_start];
888 elms->set(k, Smi::cast(arg)->value());
890 elms->set(k, HeapNumber::cast(arg)->value());
894 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
895 DisallowHeapAllocation no_gc;
896 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
897 for (int k = actual_start; k < actual_start + item_count; k++) {
898 elms->set(k, args[3 + k - actual_start], mode);
903 array->set_elements(*elms_obj);
906 array->set_length(Smi::FromInt(new_length));
908 return *result_array;
// Fast-path Array.prototype.concat: requires every argument to be a fast
// JSArray whose prototype is the (element-free) initial Array.prototype.
// Computes the combined length and most general elements kind in one pass,
// then bulk-copies each argument into a single result array. Bails out to
// the JS "ArrayConcatJS" builtin otherwise. Interior lines (including the
// `result_len`/`j` accumulator declarations) are elided in this excerpt.
912 BUILTIN(ArrayConcat) {
913 HandleScope scope(isolate);
915 int n_arguments = args.length();
917 ElementsKind elements_kind = GetInitialFastElementsKind();
918 bool has_double = false;
920 DisallowHeapAllocation no_gc;
921 Heap* heap = isolate->heap();
922 Context* native_context = isolate->context()->native_context();
923 JSObject* array_proto =
924 JSObject::cast(native_context->array_function()->prototype());
925 if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
926 AllowHeapAllocation allow_allocation;
927 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
930 // Iterate through all the arguments performing checks
931 // and calculating total length.
932 bool is_holey = false;
933 for (int i = 0; i < n_arguments; i++) {
934 Object* arg = args[i];
935 PrototypeIterator iter(isolate, arg);
936 if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements() ||
937 iter.GetCurrent() != array_proto) {
938 AllowHeapAllocation allow_allocation;
939 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
941 int len = Smi::cast(JSArray::cast(arg)->length())->value();
943 // We shouldn't overflow when adding another len.
944 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
945 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
948 DCHECK(result_len >= 0);
950 if (result_len > FixedDoubleArray::kMaxLength) {
951 AllowHeapAllocation allow_allocation;
952 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
// Widen the result's elements kind to cover every argument's kind.
955 ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
956 has_double = has_double || IsFastDoubleElementsKind(arg_kind);
957 is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
958 if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
959 elements_kind = arg_kind;
962 if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
965 // If a double array is concatted into a fast elements array, the fast
966 // elements array needs to be initialized to contain proper holes, since
967 // boxing doubles may cause incremental marking.
968 ArrayStorageAllocationMode mode =
969 has_double && IsFastObjectElementsKind(elements_kind)
970 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
971 Handle<JSArray> result_array =
972 isolate->factory()->NewJSArray(elements_kind,
976 if (result_len == 0) return *result_array;
979 Handle<FixedArrayBase> storage(result_array->elements(), isolate);
980 ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
981 for (int i = 0; i < n_arguments; i++) {
982 // It is crucial to keep |array| in a raw pointer form to avoid performance
984 JSArray* array = JSArray::cast(args[i]);
985 int len = Smi::cast(array->length())->value();
987 ElementsKind from_kind = array->GetElementsKind();
988 accessor->CopyElements(array, 0, from_kind, storage, j, len);
993 DCHECK(j == result_len);
995 return *result_array;
999 // -----------------------------------------------------------------------------
1000 // Generator and strict mode poison pills
// Poison pill installed on strict-mode-restricted accessors: always
// throws a TypeError. (Closing brace elided in this excerpt.)
1003 BUILTIN(StrictModePoisonPill) {
1004 HandleScope scope(isolate);
1005 THROW_NEW_ERROR_RETURN_FAILURE(
1007 NewTypeError("strict_poison_pill", HandleVector<Object>(NULL, 0)));
// Poison pill for restricted generator properties: always throws a
// TypeError. (Closing brace elided in this excerpt.)
1011 BUILTIN(GeneratorPoisonPill) {
1012 HandleScope scope(isolate);
1013 THROW_NEW_ERROR_RETURN_FAILURE(
1015 NewTypeError("generator_poison_pill", HandleVector<Object>(NULL, 0)));
1019 // -----------------------------------------------------------------------------
1023 // Searches the hidden prototype chain of the given object for the first
1024 // object that is an instance of the given type. If no such object can
1025 // be found then Heap::null_value() is returned.
// (The `Object* object` parameter line is elided in this excerpt.)
1026 static inline Object* FindHidden(Heap* heap,
1028 FunctionTemplateInfo* type) {
1029 for (PrototypeIterator iter(heap->isolate(), object,
1030 PrototypeIterator::START_AT_RECEIVER);
1031 !iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN); iter.Advance()) {
1032 if (type->IsTemplateFor(iter.GetCurrent())) {
1033 return iter.GetCurrent();
1036 return heap->null_value();
1040 // Returns the holder JSObject if the function can legally be called
1041 // with this receiver. Returns Heap::null_value() if the call is
1042 // illegal. Any arguments that don't fit the expected type is
1043 // overwritten with undefined. Note that holder and the arguments are
1044 // implicitly rewritten with the first object in the hidden prototype
1045 // chain that actually has the expected type.
// (The argc/argv parameter lines and the final writeback/return are
// elided in this excerpt.)
1046 static inline Object* TypeCheck(Heap* heap,
1049 FunctionTemplateInfo* info) {
1050 Object* recv = argv[0];
1051 // API calls are only supported with JSObject receivers.
1052 if (!recv->IsJSObject()) return heap->null_value();
1053 Object* sig_obj = info->signature();
1054 if (sig_obj->IsUndefined()) return recv;
1055 SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1056 // If necessary, check the receiver
1057 Object* recv_type = sig->receiver();
1058 Object* holder = recv;
1059 if (!recv_type->IsUndefined()) {
1060 holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
1061 if (holder == heap->null_value()) return heap->null_value();
1063 Object* args_obj = sig->args();
1064 // If there is no argument signature we're done
1065 if (args_obj->IsUndefined()) return holder;
1066 FixedArray* args = FixedArray::cast(args_obj);
// Only check as many arguments as were actually passed (minus receiver).
1067 int length = args->length();
1068 if (argc <= length) length = argc - 1;
1069 for (int i = 0; i < length; i++) {
1070 Object* argtype = args->get(i);
1071 if (argtype->IsUndefined()) continue;
// Arguments live below argv on the stack, hence the negative index.
1072 Object** arg = &argv[-1 - i];
1073 Object* current = *arg;
1074 current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
1075 if (current == heap->null_value()) current = heap->undefined_value();
// Common implementation for calls into API functions (functions created
// from a v8::FunctionTemplate). Templated on whether this is a [[Call]]
// or a [[Construct]] invocation. Performs receiver fix-up, signature
// checking, then dispatches to the embedder's v8::FunctionCallback.
1082 template <bool is_construct>
1083 MUST_USE_RESULT static Object* HandleApiCallHelper(
1084 BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1085 DCHECK(is_construct == CalledAsConstructor(isolate));
1086 Heap* heap = isolate->heap();
1088 HandleScope scope(isolate);
1089 Handle<JSFunction> function = args.called_function();
1090 DCHECK(function->shared()->IsApiFunction());
1092 Handle<FunctionTemplateInfo> fun_data(
1093 function->shared()->get_api_func_data(), isolate);
// Lazily finish instance configuration from the template; this can throw.
1095 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1097 isolate->factory()->ConfigureInstance(
1098 fun_data, Handle<JSObject>::cast(args.receiver())));
// Sloppy-mode, non-native functions get the global proxy substituted for
// an undefined receiver (standard sloppy-mode receiver coercion).
1101 SharedFunctionInfo* shared = function->shared();
1102 if (shared->strict_mode() == SLOPPY && !shared->native()) {
1103 Object* recv = args[0];
1104 DCHECK(!recv->IsNull());
1105 if (recv->IsUndefined()) args[0] = function->global_proxy();
// Signature check: may rewrite receiver/arguments via the hidden
// prototype chain (see TypeCheck above); null means an illegal call.
1108 Object* raw_holder = TypeCheck(heap, args.length(), &args[0], *fun_data);
1110 if (raw_holder->IsNull()) {
1111 // This function cannot be called with the given receiver. Abort!
1112 THROW_NEW_ERROR_RETURN_FAILURE(
1114 NewTypeError("illegal_invocation", HandleVector(&function, 1)));
1117 Object* raw_call_data = fun_data->call_code();
1118 if (!raw_call_data->IsUndefined()) {
1119 CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1120 Object* callback_obj = call_data->callback();
1121 v8::FunctionCallback callback =
1122 v8::ToCData<v8::FunctionCallback>(callback_obj);
1123 Object* data_obj = call_data->data();
1126 LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1127 DCHECK(raw_holder->IsJSObject());
1129 FunctionCallbackArguments custom(isolate,
// Invoke the embedder callback; an empty handle means it set no
// return value, which maps to undefined.
1137 v8::Handle<v8::Value> value = custom.Call(callback);
1138 if (value.IsEmpty()) {
1139 result = heap->undefined_value();
1141 result = *reinterpret_cast<Object**>(*value);
1142 result->VerifyApiCallResultType();
// Propagate any exception the callback scheduled on the isolate.
1145 RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
// For construct calls a non-object result is discarded and the receiver
// (the newly created object) is returned instead, per [[Construct]].
1146 if (!is_construct || result->IsJSObject()) return result;
1149 return *args.receiver();
// Entry point for normal (non-construct) calls to API functions.
1153 BUILTIN(HandleApiCall) {
1154 return HandleApiCallHelper<false>(args, isolate);
// Entry point for construct ('new') calls to API functions.
1158 BUILTIN(HandleApiCallConstruct) {
1159 return HandleApiCallHelper<true>(args, isolate);
1163 // Helper function to handle calls to non-function objects created through the
1164 // API. The object can be called as either a constructor (using new) or just as
1165 // a function (without new).
1166 MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
1168 bool is_construct_call,
1169 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1170 // Non-functions are never called as constructors. Even if this is an object
1171 // called as a constructor the delegate call is not a construct call.
1172 DCHECK(!CalledAsConstructor(isolate));
1173 Heap* heap = isolate->heap();
1175 Handle<Object> receiver = args.receiver();
1177 // Get the object called.
1178 JSObject* obj = JSObject::cast(*receiver);
1180 // Get the invocation callback from the function descriptor that was
1181 // used to create the called object.
1182 DCHECK(obj->map()->has_instance_call_handler());
1183 JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1184 DCHECK(constructor->shared()->IsApiFunction());
// The instance call handler was registered on the object's template
// (v8::ObjectTemplate::SetCallAsFunctionHandler).
1186 constructor->shared()->get_api_func_data()->instance_call_handler();
1187 DCHECK(!handler->IsUndefined());
1188 CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1189 Object* callback_obj = call_data->callback();
1190 v8::FunctionCallback callback =
1191 v8::ToCData<v8::FunctionCallback>(callback_obj);
1193 // Get the data for the call and perform the callback.
1196 HandleScope scope(isolate);
1197 LOG(isolate, ApiObjectAccess("call non-function", obj));
1199 FunctionCallbackArguments custom(isolate,
// An empty return handle from the embedder callback maps to undefined.
1206 v8::Handle<v8::Value> value = custom.Call(callback);
1207 if (value.IsEmpty()) {
1208 result = heap->undefined_value();
1210 result = *reinterpret_cast<Object**>(*value);
1211 result->VerifyApiCallResultType();
1214 // Check for exceptions and return result.
1215 RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
1220 // Handle calls to non-function objects created through the API. This delegate
1221 // function is used when the call is a normal function call.
1222 BUILTIN(HandleApiCallAsFunction) {
1223 return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1227 // Handle calls to non-function objects created through the API. This delegate
1228 // function is used when the call is a construct call.
1229 BUILTIN(HandleApiCallAsConstructor) {
1230 return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
// -----------------------------------------------------------------------------
// Load IC stub generators: each builtin below is a thin adaptor that emits
// the corresponding (Keyed)LoadIC code into the given MacroAssembler.
1234 static void Generate_LoadIC_Miss(MacroAssembler* masm) {
1235 LoadIC::GenerateMiss(masm);
1239 static void Generate_LoadIC_Normal(MacroAssembler* masm) {
1240 LoadIC::GenerateNormal(masm);
1244 static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
1245 NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(masm);
1249 static void Generate_LoadIC_Slow(MacroAssembler* masm) {
1250 LoadIC::GenerateRuntimeGetProperty(masm);
1254 static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
1255 KeyedLoadIC::GenerateInitialize(masm);
1259 static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
1260 KeyedLoadIC::GenerateRuntimeGetProperty(masm);
1264 static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
1265 KeyedLoadIC::GenerateMiss(masm);
1269 static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
1270 KeyedLoadIC::GenerateGeneric(masm);
1274 static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
1275 KeyedLoadIC::GeneratePreMonomorphic(masm);
// -----------------------------------------------------------------------------
// Store IC stub generators. Sloppy/strict pairs differ only in the
// language-mode flag passed to the generator; the *_Strict Initialize and
// PreMonomorphic variants intentionally share the mode-agnostic generator.
1279 static void Generate_StoreIC_Miss(MacroAssembler* masm) {
1280 StoreIC::GenerateMiss(masm);
1284 static void Generate_StoreIC_Normal(MacroAssembler* masm) {
1285 StoreIC::GenerateNormal(masm);
1289 static void Generate_StoreIC_Slow(MacroAssembler* masm) {
1290 NamedStoreHandlerCompiler::GenerateSlow(masm);
1294 static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
1295 ElementHandlerCompiler::GenerateStoreSlow(masm);
1299 static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
1300 NamedStoreHandlerCompiler::GenerateStoreViaSetterForDeopt(masm);
1304 static void Generate_KeyedStoreIC_Megamorphic(MacroAssembler* masm) {
1305 KeyedStoreIC::GenerateMegamorphic(masm, SLOPPY);
1309 static void Generate_KeyedStoreIC_Megamorphic_Strict(MacroAssembler* masm) {
1310 KeyedStoreIC::GenerateMegamorphic(masm, STRICT);
1314 static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
1315 KeyedStoreIC::GenerateGeneric(masm, SLOPPY);
1319 static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
1320 KeyedStoreIC::GenerateGeneric(masm, STRICT);
1324 static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
1325 KeyedStoreIC::GenerateMiss(masm);
1329 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
1330 KeyedStoreIC::GenerateInitialize(masm);
1334 static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
1335 KeyedStoreIC::GenerateInitialize(masm);
1339 static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
1340 KeyedStoreIC::GeneratePreMonomorphic(masm);
1344 static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
1345 KeyedStoreIC::GeneratePreMonomorphic(masm);
1349 static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
1350 KeyedStoreIC::GenerateSloppyArguments(masm);
// -----------------------------------------------------------------------------
// Debugger support: stub generators for debug-break points and LiveEdit;
// each forwards to the corresponding DebugCodegen emitter.
1354 static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
1355 DebugCodegen::GenerateCallICStubDebugBreak(masm);
1359 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
1360 DebugCodegen::GenerateLoadICDebugBreak(masm);
1364 static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
1365 DebugCodegen::GenerateStoreICDebugBreak(masm);
1369 static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
1370 DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
1374 static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
1375 DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
1379 static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
1380 DebugCodegen::GenerateCompareNilICDebugBreak(masm);
1384 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
1385 DebugCodegen::GenerateReturnDebugBreak(masm);
1389 static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
1390 DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
1394 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
1395 DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
1399 static void Generate_CallConstructStub_Recording_DebugBreak(
1400 MacroAssembler* masm) {
1401 DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
1405 static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
1406 DebugCodegen::GenerateSlotDebugBreak(masm);
1410 static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
1411 DebugCodegen::GeneratePlainReturnLiveEdit(masm);
1415 static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
1416 DebugCodegen::GenerateFrameDropperLiveEdit(masm);
// Builtins table starts uninitialized; code objects and names are zeroed
// until SetUp() fills them in.
1420 Builtins::Builtins() : initialized_(false) {
1421 memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
1422 memset(names_, 0, sizeof(names_[0]) * builtin_count);
1426 Builtins::~Builtins() {
// Addresses of the C++ builtin entry points, in BUILTIN_LIST_C order.
1430 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
1431 Address const Builtins::c_functions_[cfunction_count] = {
1432 BUILTIN_LIST_C(DEF_ENUM_C)
// Parallel tables for JavaScript builtins: symbolic name and expected
// argument count, both in BUILTINS_LIST_JS order.
1436 #define DEF_JS_NAME(name, ignore) #name,
1437 #define DEF_JS_ARGC(ignore, argc) argc,
1438 const char* const Builtins::javascript_names_[id_count] = {
1439 BUILTINS_LIST_JS(DEF_JS_NAME)
1442 int const Builtins::javascript_argc_[id_count] = {
1443 BUILTINS_LIST_JS(DEF_JS_ARGC)
// Per-builtin descriptor used to drive code generation in SetUp().
1448 struct BuiltinDesc {
1451 const char* s_name; // name is only used for generating log information.
1454 BuiltinExtraArguments extra_args;
1457 #define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
// Lazily-initialized table of BuiltinDesc entries; the one-time
// initialization is guarded by base::CallOnce so concurrent callers of
// functions() all observe a fully built table.
1459 class BuiltinFunctionTable {
1461 BuiltinDesc* functions() {
1462 base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
1466 base::OnceType once_;
// One extra slot for the NULL-generator sentinel written by
// InitBuiltinFunctionTable().
1467 BuiltinDesc functions_[Builtins::builtin_count + 1];
1469 friend class Builtins;
// Process-wide singleton; statically initialized so CallOnce is usable
// before any dynamic initialization runs.
1472 static BuiltinFunctionTable builtin_function_table =
1473 BUILTIN_FUNCTION_TABLE_INIT;
1475 // Define array of pointers to generators and C builtin functions.
1476 // We do this in a sort of roundabout way so that we can do the initialization
1477 // within the lexical scope of Builtins:: and within a context where
1478 // Code::Flags names a non-abstract type.
1479 void Builtins::InitBuiltinFunctionTable() {
1480 BuiltinDesc* functions = builtin_function_table.functions_;
// Write the sentinel entry at the end of the table first.
1481 functions[builtin_count].generator = NULL;
1482 functions[builtin_count].c_code = NULL;
1483 functions[builtin_count].s_name = NULL;
1484 functions[builtin_count].name = builtin_count;
1485 functions[builtin_count].flags = static_cast<Code::Flags>(0);
1486 functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
// C++ builtins: generated via the adaptor, carrying the C entry point.
1488 #define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1489 functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1490 functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1491 functions->s_name = #aname; \
1492 functions->name = c_##aname; \
1493 functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1494 functions->extra_args = aextra_args; \
// Assembler builtins: generated directly by Generate_##aname, no C code.
1497 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1498 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1499 functions->c_code = NULL; \
1500 functions->s_name = #aname; \
1501 functions->name = k##aname; \
1502 functions->flags = Code::ComputeFlags(Code::kind, \
1505 functions->extra_args = NO_EXTRA_ARGUMENTS; \
// IC handler builtins: like the assembler case but with handler flags.
1508 #define DEF_FUNCTION_PTR_H(aname, kind) \
1509 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1510 functions->c_code = NULL; \
1511 functions->s_name = #aname; \
1512 functions->name = k##aname; \
1513 functions->flags = Code::ComputeHandlerFlags(Code::kind); \
1514 functions->extra_args = NO_EXTRA_ARGUMENTS; \
// Expand every builtin list through the macros above to fill the table.
1517 BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1518 BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1519 BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
1520 BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1522 #undef DEF_FUNCTION_PTR_C
1523 #undef DEF_FUNCTION_PTR_A
// Generates (or, when deserializing, reserves slots for) the code object of
// every builtin and records it in builtins_/names_.
1527 void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
1528 DCHECK(!initialized_);
1530 // Create a scope for the handles in the builtins.
1531 HandleScope scope(isolate);
// Triggers the one-time table initialization (see BuiltinFunctionTable).
1533 const BuiltinDesc* functions = builtin_function_table.functions();
1535 // For now we generate builtin adaptor code into a stack-allocated
1536 // buffer, before copying it into individual code objects. Be careful
1537 // with alignment, some platforms don't like unaligned code.
1539 // We can generate a lot of debug code on Arm64.
1540 const size_t buffer_size = 32*KB;
1542 const size_t buffer_size = 8*KB;
// Union forces suitable alignment of the code buffer.
1544 union { int force_alignment; byte buffer[buffer_size]; } u;
1546 // Traverse the list of builtins and generate an adaptor in a
1547 // separate code object for each one.
1548 for (int i = 0; i < builtin_count; i++) {
1549 if (create_heap_objects) {
1550 MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
1551 // Generate the code/adaptor.
1552 typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
1553 Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
1554 // We pass all arguments to the generator, but it may not use all of
1555 // them. This works because the first arguments are on top of the
1557 DCHECK(!masm.has_frame());
1558 g(&masm, functions[i].name, functions[i].extra_args);
1559 // Move the code into the object heap.
1561 masm.GetCode(&desc);
1562 Code::Flags flags = functions[i].flags;
1564 isolate->factory()->NewCode(desc, flags, masm.CodeObject());
1565 // Log the event and add the code to the builtins array.
1567 CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
1568 builtins_[i] = *code;
1569 code->set_builtin_index(i);
1570 #ifdef ENABLE_DISASSEMBLER
1571 if (FLAG_print_builtin_code) {
1572 CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
1573 OFStream os(trace_scope.file());
1574 os << "Builtin: " << functions[i].s_name << "\n";
1575 code->Disassemble(functions[i].s_name, os);
1580 // Deserializing. The values will be filled in during IterateBuiltins.
1581 builtins_[i] = NULL;
1583 names_[i] = functions[i].s_name;
1586 // Mark as initialized.
1587 initialized_ = true;
// Marks the builtins table as uninitialized; the code objects themselves
// are owned by the heap.
1591 void Builtins::TearDown() {
1592 initialized_ = false;
// GC/serializer support: visits every builtin code-object slot so the
// pointers can be updated or recorded.
1596 void Builtins::IterateBuiltins(ObjectVisitor* v) {
1597 v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
// Returns the name of the builtin whose code contains |pc|, for use by
// the disassembler and logging; linear scan over all builtins.
1601 const char* Builtins::Lookup(byte* pc) {
1602 // may be called during initialization (disassembler!)
1604 for (int i = 0; i < builtin_count; i++) {
1605 Code* entry = Code::cast(builtins_[i]);
1606 if (entry->contains(pc)) {
// Emits a tail call into the runtime to service a pending interrupt.
1615 void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
1616 masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
// Emits a tail call into the runtime to handle a stack-guard trigger.
1620 void Builtins::Generate_StackCheck(MacroAssembler* masm) {
1621 masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
// Accessor definitions: one Handle<Code> getter per builtin. All three
// macro variants expand to the same body; they differ only in the extra
// list parameters each BUILTIN_LIST_* expansion supplies. The handle
// points at the builtins_ slot, so it tracks code replacement.
1625 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
1626 Handle<Code> Builtins::name() { \
1627 Code** code_address = \
1628 reinterpret_cast<Code**>(builtin_address(k##name)); \
1629 return Handle<Code>(code_address); \
1631 #define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
1632 Handle<Code> Builtins::name() { \
1633 Code** code_address = \
1634 reinterpret_cast<Code**>(builtin_address(k##name)); \
1635 return Handle<Code>(code_address); \
1637 #define DEFINE_BUILTIN_ACCESSOR_H(name, kind) \
1638 Handle<Code> Builtins::name() { \
1639 Code** code_address = \
1640 reinterpret_cast<Code**>(builtin_address(k##name)); \
1641 return Handle<Code>(code_address); \
1643 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
1644 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
1645 BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
1646 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
1647 #undef DEFINE_BUILTIN_ACCESSOR_C
1648 #undef DEFINE_BUILTIN_ACCESSOR_A
1651 } } // namespace v8::internal