1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
8 #include "src/arguments.h"
9 #include "src/base/once.h"
10 #include "src/bootstrapper.h"
11 #include "src/builtins.h"
12 #include "src/cpu-profiler.h"
13 #include "src/gdb-jit.h"
14 #include "src/heap/mark-compact.h"
15 #include "src/heap-profiler.h"
16 #include "src/ic/ic.h"
17 #include "src/ic/ic-compiler.h"
18 #include "src/prototype.h"
19 #include "src/vm-state-inl.h"
26 // Arguments object passed to C++ builtins.
27 template <BuiltinExtraArguments extra_args>
28 class BuiltinArguments : public Arguments {
30 BuiltinArguments(int length, Object** arguments)
31 : Arguments(length, arguments) { }
33 Object*& operator[] (int index) {
34 DCHECK(index < length());
35 return Arguments::operator[](index);
38 template <class S> Handle<S> at(int index) {
39 DCHECK(index < length());
40 return Arguments::at<S>(index);
43 Handle<Object> receiver() {
44 return Arguments::at<Object>(0);
47 Handle<JSFunction> called_function() {
48 STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
49 return Arguments::at<JSFunction>(Arguments::length() - 1);
52 // Gets the total number of arguments including the receiver (but
53 // excluding extra arguments).
55 STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
56 return Arguments::length();
61 // Check we have at least the receiver.
62 DCHECK(Arguments::length() >= 1);
68 // Specialize BuiltinArguments for the called function extra argument.
71 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
72 return Arguments::length() - 1;
77 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
78 // Check we have at least the receiver and the called function.
79 DCHECK(Arguments::length() >= 2);
80 // Make sure cast to JSFunction succeeds.
86 #define DEF_ARG_TYPE(name, spec) \
87 typedef BuiltinArguments<spec> name##ArgumentsType;
88 BUILTIN_LIST_C(DEF_ARG_TYPE)
93 // ----------------------------------------------------------------------------
94 // Support macro for defining builtins in C++.
95 // ----------------------------------------------------------------------------
97 // A builtin function is defined by writing:
103 // In the body of the builtin function the arguments can be accessed
104 // through the BuiltinArguments object args.
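// For example, the EmptyFunction builtin defined further below follows this
// pattern:
//
//   BUILTIN(EmptyFunction) {
//     return isolate->heap()->undefined_value();
//   }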
108 #define BUILTIN(name) \
109 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
110 name##ArgumentsType args, Isolate* isolate); \
111 MUST_USE_RESULT static Object* Builtin_##name( \
112 int args_length, Object** args_object, Isolate* isolate) { \
113 name##ArgumentsType args(args_length, args_object); \
115 return Builtin_Impl_##name(args, isolate); \
117 MUST_USE_RESULT static Object* Builtin_Impl_##name( \
118 name##ArgumentsType args, Isolate* isolate)
120 #else // For release mode.
122 #define BUILTIN(name) \
123 static Object* Builtin_impl##name( \
124 name##ArgumentsType args, Isolate* isolate); \
125 static Object* Builtin_##name( \
126 int args_length, Object** args_object, Isolate* isolate) { \
127 name##ArgumentsType args(args_length, args_object); \
128 return Builtin_impl##name(args, isolate); \
130 static Object* Builtin_impl##name( \
131 name##ArgumentsType args, Isolate* isolate)
136 static inline bool CalledAsConstructor(Isolate* isolate) {
137 // Calculate the result using a full stack frame iterator and check
138 // that the state of the stack is as we assume it to be in the generated code.
140 StackFrameIterator it(isolate);
141 DCHECK(it.frame()->is_exit());
143 StackFrame* frame = it.frame();
144 bool reference_result = frame->is_construct();
145 Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
146 // Because we know fp points to an exit frame we can use the relevant
147 // part of ExitFrame::ComputeCallerState directly.
148 const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
149 Address caller_fp = Memory::Address_at(fp + kCallerOffset);
150 // This inlines the part of StackFrame::ComputeType that grabs the
151 // type of the current frame. Note that StackFrame::ComputeType
152 // has been specialized for each architecture so if any one of them
153 // changes this code has to be changed as well.
154 const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
155 const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
156 Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
157 bool result = (marker == kConstructMarker);
158 DCHECK_EQ(result, reference_result);
164 // ----------------------------------------------------------------------------
168 return isolate->heap()->undefined_value(); // Make compiler happy.
172 BUILTIN(EmptyFunction) {
173 return isolate->heap()->undefined_value();
177 static void MoveDoubleElements(FixedDoubleArray* dst, int dst_index,
178 FixedDoubleArray* src, int src_index, int len) {
179 if (len == 0) return;
180 MemMove(dst->data_start() + dst_index, src->data_start() + src_index,
185 static bool ArrayPrototypeHasNoElements(Heap* heap,
186 Context* native_context,
187 JSObject* array_proto) {
188 DisallowHeapAllocation no_gc;
189 // This method depends on the non-writability of the Object and Array prototype fields.
191 if (array_proto->elements() != heap->empty_fixed_array()) return false;
193 PrototypeIterator iter(heap->isolate(), array_proto);
194 if (iter.IsAtEnd()) {
197 array_proto = JSObject::cast(iter.GetCurrent());
198 if (array_proto != native_context->initial_object_prototype()) return false;
199 if (array_proto->elements() != heap->empty_fixed_array()) return false;
201 return iter.IsAtEnd();
205 // Returns empty handle if not applicable.
207 static inline MaybeHandle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
209 Handle<Object> receiver,
211 int first_added_arg) {
212 if (!receiver->IsJSArray()) return MaybeHandle<FixedArrayBase>();
213 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
214 // If there may be elements accessors in the prototype chain, the fast path
215 // cannot be used if there are arguments to add to the array.
216 if (args != NULL && array->map()->DictionaryElementsInPrototypeChainOnly()) {
217 return MaybeHandle<FixedArrayBase>();
219 if (array->map()->is_observed()) return MaybeHandle<FixedArrayBase>();
220 if (!array->map()->is_extensible()) return MaybeHandle<FixedArrayBase>();
221 Handle<FixedArrayBase> elms(array->elements(), isolate);
222 Heap* heap = isolate->heap();
223 Map* map = elms->map();
224 if (map == heap->fixed_array_map()) {
225 if (args == NULL || array->HasFastObjectElements()) return elms;
226 } else if (map == heap->fixed_cow_array_map()) {
227 elms = JSObject::EnsureWritableFastElements(array);
228 if (args == NULL || array->HasFastObjectElements()) return elms;
229 } else if (map == heap->fixed_double_array_map()) {
230 if (args == NULL) return elms;
232 return MaybeHandle<FixedArrayBase>();
235 // Need to ensure that the arguments passed in args can be contained in the array.
237 int args_length = args->length();
238 if (first_added_arg >= args_length) return handle(array->elements(), isolate);
240 ElementsKind origin_kind = array->map()->elements_kind();
241 DCHECK(!IsFastObjectElementsKind(origin_kind));
242 ElementsKind target_kind = origin_kind;
244 DisallowHeapAllocation no_gc;
245 int arg_count = args->length() - first_added_arg;
246 Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
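// Builtin arguments are laid out at decreasing addresses (args[i] lives at
// args->arguments() - i), so this points at the last of the added arguments
// and the loop below visits them in reverse order, which is fine here because
// only the most general elements kind is being computed.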
247 for (int i = 0; i < arg_count; i++) {
248 Object* arg = arguments[i];
249 if (arg->IsHeapObject()) {
250 if (arg->IsHeapNumber()) {
251 target_kind = FAST_DOUBLE_ELEMENTS;
253 target_kind = FAST_ELEMENTS;
259 if (target_kind != origin_kind) {
260 JSObject::TransitionElementsKind(array, target_kind);
261 return handle(array->elements(), isolate);
267 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
269 if (!FLAG_clever_optimizations) return false;
270 DisallowHeapAllocation no_gc;
271 Context* native_context = heap->isolate()->context()->native_context();
272 JSObject* array_proto =
273 JSObject::cast(native_context->array_function()->prototype());
274 PrototypeIterator iter(heap->isolate(), receiver);
275 return iter.GetCurrent() == array_proto &&
276 ArrayPrototypeHasNoElements(heap, native_context, array_proto);
280 MUST_USE_RESULT static Object* CallJsBuiltin(
283 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
284 HandleScope handleScope(isolate);
286 Handle<Object> js_builtin = Object::GetProperty(
288 handle(isolate->native_context()->builtins(), isolate),
289 name).ToHandleChecked();
290 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
291 int argc = args.length() - 1;
292 ScopedVector<Handle<Object> > argv(argc);
293 for (int i = 0; i < argc; ++i) {
294 argv[i] = args.at<Object>(i + 1);
296 Handle<Object> result;
297 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
299 Execution::Call(isolate,
309 HandleScope scope(isolate);
310 Handle<Object> receiver = args.receiver();
311 MaybeHandle<FixedArrayBase> maybe_elms_obj =
312 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
313 Handle<FixedArrayBase> elms_obj;
314 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
315 return CallJsBuiltin(isolate, "ArrayPush", args);
318 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
319 int len = Smi::cast(array->length())->value();
320 int to_add = args.length() - 1;
321 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
322 return CallJsBuiltin(isolate, "ArrayPush", args);
324 DCHECK(!array->map()->is_observed());
326 ElementsKind kind = array->GetElementsKind();
328 if (IsFastSmiOrObjectElementsKind(kind)) {
329 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
331 return Smi::FromInt(len);
333 // Currently fixed arrays cannot grow too big, so
334 // we should never hit this case.
335 DCHECK(to_add <= (Smi::kMaxValue - len));
337 int new_length = len + to_add;
339 if (new_length > elms->length()) {
340 // New backing storage is needed.
341 int capacity = new_length + (new_length >> 1) + 16;
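// Grow by roughly 1.5x plus a slack of 16 entries; e.g. a new_length of 64
// yields a capacity of 64 + 32 + 16 == 112.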
342 Handle<FixedArray> new_elms =
343 isolate->factory()->NewUninitializedFixedArray(capacity);
345 ElementsAccessor* accessor = array->GetElementsAccessor();
346 accessor->CopyElements(
347 elms_obj, 0, kind, new_elms, 0,
348 ElementsAccessor::kCopyToEndAndInitializeToHole);
353 // Add the provided values.
354 DisallowHeapAllocation no_gc;
355 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
356 for (int index = 0; index < to_add; index++) {
357 elms->set(index + len, args[index + 1], mode);
360 if (*elms != array->elements()) {
361 array->set_elements(*elms);
365 array->set_length(Smi::FromInt(new_length));
366 return Smi::FromInt(new_length);
368 int elms_len = elms_obj->length();
370 return Smi::FromInt(len);
372 // Currently fixed arrays cannot grow too big, so
373 // we should never hit this case.
374 DCHECK(to_add <= (Smi::kMaxValue - len));
376 int new_length = len + to_add;
378 Handle<FixedDoubleArray> new_elms;
380 if (new_length > elms_len) {
381 // New backing storage is needed.
382 int capacity = new_length + (new_length >> 1) + 16;
383 // Create new backing store; since capacity > 0, we can
384 // safely cast to FixedDoubleArray.
385 new_elms = Handle<FixedDoubleArray>::cast(
386 isolate->factory()->NewFixedDoubleArray(capacity));
388 ElementsAccessor* accessor = array->GetElementsAccessor();
389 accessor->CopyElements(
390 elms_obj, 0, kind, new_elms, 0,
391 ElementsAccessor::kCopyToEndAndInitializeToHole);
394 // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
395 // empty_fixed_array.
396 new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
399 // Add the provided values.
400 DisallowHeapAllocation no_gc;
402 for (index = 0; index < to_add; index++) {
403 Object* arg = args[index + 1];
404 new_elms->set(index + len, arg->Number());
407 if (*new_elms != array->elements()) {
408 array->set_elements(*new_elms);
412 array->set_length(Smi::FromInt(new_length));
413 return Smi::FromInt(new_length);
419 HandleScope scope(isolate);
420 Handle<Object> receiver = args.receiver();
421 MaybeHandle<FixedArrayBase> maybe_elms_obj =
422 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
423 Handle<FixedArrayBase> elms_obj;
424 if (!maybe_elms_obj.ToHandle(&elms_obj)) {
425 return CallJsBuiltin(isolate, "ArrayPop", args);
428 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
429 DCHECK(!array->map()->is_observed());
431 int len = Smi::cast(array->length())->value();
432 if (len == 0) return isolate->heap()->undefined_value();
434 ElementsAccessor* accessor = array->GetElementsAccessor();
435 int new_length = len - 1;
436 Handle<Object> element =
437 accessor->Get(array, array, new_length, elms_obj).ToHandleChecked();
438 if (element->IsTheHole()) {
439 return CallJsBuiltin(isolate, "ArrayPop", args);
441 RETURN_FAILURE_ON_EXCEPTION(
443 accessor->SetLength(array, handle(Smi::FromInt(new_length), isolate)));
448 BUILTIN(ArrayShift) {
449 HandleScope scope(isolate);
450 Heap* heap = isolate->heap();
451 Handle<Object> receiver = args.receiver();
452 MaybeHandle<FixedArrayBase> maybe_elms_obj =
453 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
454 Handle<FixedArrayBase> elms_obj;
455 if (!maybe_elms_obj.ToHandle(&elms_obj) ||
456 !IsJSArrayFastElementMovingAllowed(heap,
457 *Handle<JSArray>::cast(receiver))) {
458 return CallJsBuiltin(isolate, "ArrayShift", args);
460 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
461 DCHECK(!array->map()->is_observed());
463 int len = Smi::cast(array->length())->value();
464 if (len == 0) return heap->undefined_value();
467 ElementsAccessor* accessor = array->GetElementsAccessor();
468 Handle<Object> first =
469 accessor->Get(array, array, 0, elms_obj).ToHandleChecked();
470 if (first->IsTheHole()) {
471 return CallJsBuiltin(isolate, "ArrayShift", args);
474 if (heap->CanMoveObjectStart(*elms_obj)) {
475 array->set_elements(heap->LeftTrimFixedArray(*elms_obj, 1));
477 // Shift the elements.
478 if (elms_obj->IsFixedArray()) {
479 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
480 DisallowHeapAllocation no_gc;
481 heap->MoveElements(*elms, 0, 1, len - 1);
482 elms->set(len - 1, heap->the_hole_value());
484 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
485 MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
486 elms->set_the_hole(len - 1);
491 array->set_length(Smi::FromInt(len - 1));
497 BUILTIN(ArrayUnshift) {
498 HandleScope scope(isolate);
499 Heap* heap = isolate->heap();
500 Handle<Object> receiver = args.receiver();
501 MaybeHandle<FixedArrayBase> maybe_elms_obj =
502 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
503 Handle<FixedArrayBase> elms_obj;
504 if (!maybe_elms_obj.ToHandle(&elms_obj) ||
505 !IsJSArrayFastElementMovingAllowed(heap,
506 *Handle<JSArray>::cast(receiver))) {
507 return CallJsBuiltin(isolate, "ArrayUnshift", args);
509 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
510 DCHECK(!array->map()->is_observed());
511 if (!array->HasFastSmiOrObjectElements()) {
512 return CallJsBuiltin(isolate, "ArrayUnshift", args);
514 int len = Smi::cast(array->length())->value();
515 int to_add = args.length() - 1;
516 int new_length = len + to_add;
517 // Currently fixed arrays cannot grow too big, so
518 // we should never hit this case.
519 DCHECK(to_add <= (Smi::kMaxValue - len));
521 if (to_add > 0 && JSArray::WouldChangeReadOnlyLength(array, len + to_add)) {
522 return CallJsBuiltin(isolate, "ArrayUnshift", args);
525 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
527 JSObject::EnsureCanContainElements(array, &args, 1, to_add,
528 DONT_ALLOW_DOUBLE_ELEMENTS);
530 if (new_length > elms->length()) {
531 // New backing storage is needed.
532 int capacity = new_length + (new_length >> 1) + 16;
533 Handle<FixedArray> new_elms =
534 isolate->factory()->NewUninitializedFixedArray(capacity);
536 ElementsKind kind = array->GetElementsKind();
537 ElementsAccessor* accessor = array->GetElementsAccessor();
538 accessor->CopyElements(
539 elms, 0, kind, new_elms, to_add,
540 ElementsAccessor::kCopyToEndAndInitializeToHole);
543 array->set_elements(*elms);
545 DisallowHeapAllocation no_gc;
546 heap->MoveElements(*elms, to_add, 0, len);
549 // Add the provided values.
550 DisallowHeapAllocation no_gc;
551 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
552 for (int i = 0; i < to_add; i++) {
553 elms->set(i, args[i + 1], mode);
557 array->set_length(Smi::FromInt(new_length));
558 return Smi::FromInt(new_length);
562 BUILTIN(ArraySlice) {
563 HandleScope scope(isolate);
564 Heap* heap = isolate->heap();
565 Handle<Object> receiver = args.receiver();
567 int relative_start = 0;
568 int relative_end = 0;
570 DisallowHeapAllocation no_gc;
571 if (receiver->IsJSArray()) {
572 JSArray* array = JSArray::cast(*receiver);
573 if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
574 AllowHeapAllocation allow_allocation;
575 return CallJsBuiltin(isolate, "ArraySlice", args);
578 if (!array->HasFastElements()) {
579 AllowHeapAllocation allow_allocation;
580 return CallJsBuiltin(isolate, "ArraySlice", args);
583 len = Smi::cast(array->length())->value();
585 // Array.slice(arguments, ...) is quite a common idiom (notably more
586 // than 50% of invocations in Web apps). Treat it in C++ as well.
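// A typical form of this idiom in JavaScript is
//   var copy = Array.prototype.slice.call(arguments);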
588 isolate->context()->native_context()->sloppy_arguments_map();
590 bool is_arguments_object_with_fast_elements =
591 receiver->IsJSObject() &&
592 JSObject::cast(*receiver)->map() == arguments_map;
593 if (!is_arguments_object_with_fast_elements) {
594 AllowHeapAllocation allow_allocation;
595 return CallJsBuiltin(isolate, "ArraySlice", args);
597 JSObject* object = JSObject::cast(*receiver);
599 if (!object->HasFastElements()) {
600 AllowHeapAllocation allow_allocation;
601 return CallJsBuiltin(isolate, "ArraySlice", args);
604 Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
605 if (!len_obj->IsSmi()) {
606 AllowHeapAllocation allow_allocation;
607 return CallJsBuiltin(isolate, "ArraySlice", args);
609 len = Smi::cast(len_obj)->value();
610 if (len > object->elements()->length()) {
611 AllowHeapAllocation allow_allocation;
612 return CallJsBuiltin(isolate, "ArraySlice", args);
617 int n_arguments = args.length() - 1;
619 // Note carefully chosen defaults---if an argument is missing,
620 // it's undefined which gets converted to 0 for relative_start
621 // and to len for relative_end.
624 if (n_arguments > 0) {
625 Object* arg1 = args[1];
627 relative_start = Smi::cast(arg1)->value();
628 } else if (arg1->IsHeapNumber()) {
629 double start = HeapNumber::cast(arg1)->value();
630 if (start < kMinInt || start > kMaxInt) {
631 AllowHeapAllocation allow_allocation;
632 return CallJsBuiltin(isolate, "ArraySlice", args);
634 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
635 } else if (!arg1->IsUndefined()) {
636 AllowHeapAllocation allow_allocation;
637 return CallJsBuiltin(isolate, "ArraySlice", args);
639 if (n_arguments > 1) {
640 Object* arg2 = args[2];
642 relative_end = Smi::cast(arg2)->value();
643 } else if (arg2->IsHeapNumber()) {
644 double end = HeapNumber::cast(arg2)->value();
645 if (end < kMinInt || end > kMaxInt) {
646 AllowHeapAllocation allow_allocation;
647 return CallJsBuiltin(isolate, "ArraySlice", args);
649 relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
650 } else if (!arg2->IsUndefined()) {
651 AllowHeapAllocation allow_allocation;
652 return CallJsBuiltin(isolate, "ArraySlice", args);
658 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 6.
659 int k = (relative_start < 0) ? Max(len + relative_start, 0)
660 : Min(relative_start, len);
662 // ECMA-262, 3rd Edition, Section 15.4.4.10, step 8.
663 int final = (relative_end < 0) ? Max(len + relative_end, 0)
664 : Min(relative_end, len);
666 // Calculate the length of result array.
667 int result_len = Max(final - k, 0);
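// For example, with len == 10, relative_start == -3 and relative_end == 10:
// k == Max(10 - 3, 0) == 7, final == Min(10, 10) == 10, result_len == 3.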
669 Handle<JSObject> object = Handle<JSObject>::cast(receiver);
670 Handle<FixedArrayBase> elms(object->elements(), isolate);
672 ElementsKind kind = object->GetElementsKind();
673 if (IsHoleyElementsKind(kind)) {
674 DisallowHeapAllocation no_gc;
676 ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
677 for (int i = k; i < final; i++) {
678 if (!accessor->HasElement(object, object, i, elms)) {
684 kind = GetPackedElementsKind(kind);
685 } else if (!receiver->IsJSArray()) {
686 AllowHeapAllocation allow_allocation;
687 return CallJsBuiltin(isolate, "ArraySlice", args);
691 Handle<JSArray> result_array =
692 isolate->factory()->NewJSArray(kind, result_len, result_len);
694 DisallowHeapAllocation no_gc;
695 if (result_len == 0) return *result_array;
697 ElementsAccessor* accessor = object->GetElementsAccessor();
698 accessor->CopyElements(
699 elms, k, kind, handle(result_array->elements(), isolate), 0, result_len);
700 return *result_array;
704 BUILTIN(ArraySplice) {
705 HandleScope scope(isolate);
706 Heap* heap = isolate->heap();
707 Handle<Object> receiver = args.receiver();
708 MaybeHandle<FixedArrayBase> maybe_elms_obj =
709 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
710 Handle<FixedArrayBase> elms_obj;
711 if (!maybe_elms_obj.ToHandle(&elms_obj) ||
712 !IsJSArrayFastElementMovingAllowed(heap,
713 *Handle<JSArray>::cast(receiver))) {
714 return CallJsBuiltin(isolate, "ArraySplice", args);
716 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
717 DCHECK(!array->map()->is_observed());
719 int len = Smi::cast(array->length())->value();
721 int n_arguments = args.length() - 1;
723 int relative_start = 0;
724 if (n_arguments > 0) {
725 DisallowHeapAllocation no_gc;
726 Object* arg1 = args[1];
728 relative_start = Smi::cast(arg1)->value();
729 } else if (arg1->IsHeapNumber()) {
730 double start = HeapNumber::cast(arg1)->value();
731 if (start < kMinInt || start > kMaxInt) {
732 AllowHeapAllocation allow_allocation;
733 return CallJsBuiltin(isolate, "ArraySplice", args);
735 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
736 } else if (!arg1->IsUndefined()) {
737 AllowHeapAllocation allow_allocation;
738 return CallJsBuiltin(isolate, "ArraySplice", args);
741 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
742 : Min(relative_start, len);
744 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
745 // given as a request to delete all the elements from the start.
746 // This differs from the case of an undefined delete count.
747 // This does not follow ECMA-262, but we do the same for compatibility.
749 int actual_delete_count;
750 if (n_arguments == 1) {
751 DCHECK(len - actual_start >= 0);
752 actual_delete_count = len - actual_start;
754 int value = 0; // ToInteger(undefined) == 0
755 if (n_arguments > 1) {
756 DisallowHeapAllocation no_gc;
757 Object* arg2 = args[2];
759 value = Smi::cast(arg2)->value();
761 AllowHeapAllocation allow_allocation;
762 return CallJsBuiltin(isolate, "ArraySplice", args);
765 actual_delete_count = Min(Max(value, 0), len - actual_start);
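// For example, with len == 10, actual_start == 7 and a requested delete
// count of 5, actual_delete_count == Min(Max(5, 0), 10 - 7) == 3.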
768 ElementsKind elements_kind = array->GetElementsKind();
770 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
771 int new_length = len - actual_delete_count + item_count;
773 // For double mode we do not support changing the length.
774 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
775 return CallJsBuiltin(isolate, "ArraySplice", args);
778 if (new_length == 0) {
779 Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
780 elms_obj, elements_kind, actual_delete_count);
781 array->set_elements(heap->empty_fixed_array());
782 array->set_length(Smi::FromInt(0));
786 Handle<JSArray> result_array =
787 isolate->factory()->NewJSArray(elements_kind,
789 actual_delete_count);
791 if (actual_delete_count > 0) {
792 DisallowHeapAllocation no_gc;
793 ElementsAccessor* accessor = array->GetElementsAccessor();
794 accessor->CopyElements(
795 elms_obj, actual_start, elements_kind,
796 handle(result_array->elements(), isolate), 0, actual_delete_count);
799 bool elms_changed = false;
800 if (item_count < actual_delete_count) {
802 const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
803 ((actual_start + item_count) <
804 (len - actual_delete_count - actual_start));
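// That is: when the backing store can be moved (it is not in large-object
// space) and the prefix before the splice point (plus the inserted items) is
// smaller than the suffix behind the deleted region, it is cheaper to shift
// the prefix right and trim the array from the start than to shift the whole
// suffix left.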
806 const int delta = actual_delete_count - item_count;
808 if (elms_obj->IsFixedDoubleArray()) {
809 Handle<FixedDoubleArray> elms =
810 Handle<FixedDoubleArray>::cast(elms_obj);
811 MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
813 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
814 DisallowHeapAllocation no_gc;
815 heap->MoveElements(*elms, delta, 0, actual_start);
818 if (heap->CanMoveObjectStart(*elms_obj)) {
819 // On the fast path we move the start of the object in memory.
820 elms_obj = handle(heap->LeftTrimFixedArray(*elms_obj, delta));
822 // This is the slow path. We are going to move the elements to the left
823 // by copying them. For trimmed values we store the hole.
824 if (elms_obj->IsFixedDoubleArray()) {
825 Handle<FixedDoubleArray> elms =
826 Handle<FixedDoubleArray>::cast(elms_obj);
827 MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
828 elms->FillWithHoles(len - delta, len);
830 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
831 DisallowHeapAllocation no_gc;
832 heap->MoveElements(*elms, 0, delta, len - delta);
833 elms->FillWithHoles(len - delta, len);
838 if (elms_obj->IsFixedDoubleArray()) {
839 Handle<FixedDoubleArray> elms =
840 Handle<FixedDoubleArray>::cast(elms_obj);
841 MoveDoubleElements(*elms, actual_start + item_count,
842 *elms, actual_start + actual_delete_count,
843 (len - actual_delete_count - actual_start));
844 elms->FillWithHoles(new_length, len);
846 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
847 DisallowHeapAllocation no_gc;
848 heap->MoveElements(*elms, actual_start + item_count,
849 actual_start + actual_delete_count,
850 (len - actual_delete_count - actual_start));
851 elms->FillWithHoles(new_length, len);
854 } else if (item_count > actual_delete_count) {
855 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
856 // Currently fixed arrays cannot grow too big, so
857 // we should never hit this case.
858 DCHECK((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
860 // Check if the array needs to grow.
861 if (new_length > elms->length()) {
862 // New backing storage is needed.
863 int capacity = new_length + (new_length >> 1) + 16;
864 Handle<FixedArray> new_elms =
865 isolate->factory()->NewUninitializedFixedArray(capacity);
867 DisallowHeapAllocation no_gc;
869 ElementsKind kind = array->GetElementsKind();
870 ElementsAccessor* accessor = array->GetElementsAccessor();
871 if (actual_start > 0) {
872 // Copy the part before actual_start as is.
873 accessor->CopyElements(
874 elms, 0, kind, new_elms, 0, actual_start);
876 accessor->CopyElements(
877 elms, actual_start + actual_delete_count, kind,
878 new_elms, actual_start + item_count,
879 ElementsAccessor::kCopyToEndAndInitializeToHole);
884 DisallowHeapAllocation no_gc;
885 heap->MoveElements(*elms, actual_start + item_count,
886 actual_start + actual_delete_count,
887 (len - actual_delete_count - actual_start));
891 if (IsFastDoubleElementsKind(elements_kind)) {
892 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
893 for (int k = actual_start; k < actual_start + item_count; k++) {
894 Object* arg = args[3 + k - actual_start];
896 elms->set(k, Smi::cast(arg)->value());
898 elms->set(k, HeapNumber::cast(arg)->value());
902 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
903 DisallowHeapAllocation no_gc;
904 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
905 for (int k = actual_start; k < actual_start + item_count; k++) {
906 elms->set(k, args[3 + k - actual_start], mode);
911 array->set_elements(*elms_obj);
914 array->set_length(Smi::FromInt(new_length));
916 return *result_array;
920 BUILTIN(ArrayConcat) {
921 HandleScope scope(isolate);
923 int n_arguments = args.length();
925 ElementsKind elements_kind = GetInitialFastElementsKind();
926 bool has_double = false;
928 DisallowHeapAllocation no_gc;
929 Heap* heap = isolate->heap();
930 Context* native_context = isolate->context()->native_context();
931 JSObject* array_proto =
932 JSObject::cast(native_context->array_function()->prototype());
933 if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
934 AllowHeapAllocation allow_allocation;
935 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
938 // Iterate through all the arguments performing checks
939 // and calculating total length.
940 bool is_holey = false;
941 for (int i = 0; i < n_arguments; i++) {
942 Object* arg = args[i];
943 PrototypeIterator iter(isolate, arg);
944 if (!arg->IsJSArray() || !JSArray::cast(arg)->HasFastElements() ||
945 iter.GetCurrent() != array_proto) {
946 AllowHeapAllocation allow_allocation;
947 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
949 int len = Smi::cast(JSArray::cast(arg)->length())->value();
951 // We shouldn't overflow when adding another len.
952 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
953 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
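// Each individual len is bounded by FixedArray::kMaxLength, and the running
// result_len is bailed out on below once it exceeds
// FixedDoubleArray::kMaxLength, so adding another len cannot overflow a
// signed int.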
956 DCHECK(result_len >= 0);
958 if (result_len > FixedDoubleArray::kMaxLength) {
959 AllowHeapAllocation allow_allocation;
960 return CallJsBuiltin(isolate, "ArrayConcatJS", args);
963 ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
964 has_double = has_double || IsFastDoubleElementsKind(arg_kind);
965 is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
966 if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
967 elements_kind = arg_kind;
970 if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
973 // If a double array is concatenated into a fast elements array, the fast
974 // elements array needs to be initialized to contain proper holes, since
975 // boxing doubles may cause incremental marking.
976 ArrayStorageAllocationMode mode =
977 has_double && IsFastObjectElementsKind(elements_kind)
978 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
979 Handle<JSArray> result_array =
980 isolate->factory()->NewJSArray(elements_kind,
984 if (result_len == 0) return *result_array;
987 Handle<FixedArrayBase> storage(result_array->elements(), isolate);
988 ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
989 for (int i = 0; i < n_arguments; i++) {
990 // TODO(ishell): It is crucial to keep |array| as a raw pointer to avoid
991 // performance degradation. Revisit this later.
992 JSArray* array = JSArray::cast(args[i]);
993 int len = Smi::cast(array->length())->value();
994 ElementsKind from_kind = array->GetElementsKind();
996 accessor->CopyElements(array, 0, from_kind, storage, j, len);
1001 DCHECK(j == result_len);
1003 return *result_array;
1007 // -----------------------------------------------------------------------------
1008 // Generator and strict mode poison pills
1011 BUILTIN(StrictModePoisonPill) {
1012 HandleScope scope(isolate);
1013 return isolate->Throw(*isolate->factory()->NewTypeError(
1014 "strict_poison_pill", HandleVector<Object>(NULL, 0)));
1018 BUILTIN(GeneratorPoisonPill) {
1019 HandleScope scope(isolate);
1020 return isolate->Throw(*isolate->factory()->NewTypeError(
1021 "generator_poison_pill", HandleVector<Object>(NULL, 0)));
1025 // -----------------------------------------------------------------------------
1029 // Searches the hidden prototype chain of the given object for the first
1030 // object that is an instance of the given type. If no such object can
1031 // be found then Heap::null_value() is returned.
1032 static inline Object* FindHidden(Heap* heap,
1034 FunctionTemplateInfo* type) {
1035 for (PrototypeIterator iter(heap->isolate(), object,
1036 PrototypeIterator::START_AT_RECEIVER);
1037 !iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN); iter.Advance()) {
1038 if (type->IsTemplateFor(iter.GetCurrent())) {
1039 return iter.GetCurrent();
1042 return heap->null_value();
1046 // Returns the holder JSObject if the function can legally be called
1047 // with this receiver. Returns Heap::null_value() if the call is
1048 // illegal. Any arguments that don't fit the expected type are
1049 // overwritten with undefined. Note that holder and the arguments are
1050 // implicitly rewritten with the first object in the hidden prototype
1051 // chain that actually has the expected type.
1052 static inline Object* TypeCheck(Heap* heap,
1055 FunctionTemplateInfo* info) {
1056 Object* recv = argv[0];
1057 // API calls are only supported with JSObject receivers.
1058 if (!recv->IsJSObject()) return heap->null_value();
1059 Object* sig_obj = info->signature();
1060 if (sig_obj->IsUndefined()) return recv;
1061 SignatureInfo* sig = SignatureInfo::cast(sig_obj);
1062 // If necessary, check the receiver
1063 Object* recv_type = sig->receiver();
1064 Object* holder = recv;
1065 if (!recv_type->IsUndefined()) {
1066 holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
1067 if (holder == heap->null_value()) return heap->null_value();
1069 Object* args_obj = sig->args();
1070 // If there is no argument signature, we're done.
1071 if (args_obj->IsUndefined()) return holder;
1072 FixedArray* args = FixedArray::cast(args_obj);
1073 int length = args->length();
1074 if (argc <= length) length = argc - 1;
1075 for (int i = 0; i < length; i++) {
1076 Object* argtype = args->get(i);
1077 if (argtype->IsUndefined()) continue;
1078 Object** arg = &argv[-1 - i];
1079 Object* current = *arg;
1080 current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
1081 if (current == heap->null_value()) current = heap->undefined_value();
1088 template <bool is_construct>
1089 MUST_USE_RESULT static Object* HandleApiCallHelper(
1090 BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
1091 DCHECK(is_construct == CalledAsConstructor(isolate));
1092 Heap* heap = isolate->heap();
1094 HandleScope scope(isolate);
1095 Handle<JSFunction> function = args.called_function();
1096 DCHECK(function->shared()->IsApiFunction());
1098 Handle<FunctionTemplateInfo> fun_data(
1099 function->shared()->get_api_func_data(), isolate);
1101 ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
1103 isolate->factory()->ConfigureInstance(
1104 fun_data, Handle<JSObject>::cast(args.receiver())));
1107 SharedFunctionInfo* shared = function->shared();
1108 if (shared->strict_mode() == SLOPPY && !shared->native()) {
1109 Object* recv = args[0];
1110 DCHECK(!recv->IsNull());
1111 if (recv->IsUndefined()) args[0] = function->global_proxy();
1114 Object* raw_holder = TypeCheck(heap, args.length(), &args[0], *fun_data);
1116 if (raw_holder->IsNull()) {
1117 // This function cannot be called with the given receiver. Abort!
1118 Handle<Object> obj =
1119 isolate->factory()->NewTypeError(
1120 "illegal_invocation", HandleVector(&function, 1));
1121 return isolate->Throw(*obj);
1124 Object* raw_call_data = fun_data->call_code();
1125 if (!raw_call_data->IsUndefined()) {
1126 CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
1127 Object* callback_obj = call_data->callback();
1128 v8::FunctionCallback callback =
1129 v8::ToCData<v8::FunctionCallback>(callback_obj);
1130 Object* data_obj = call_data->data();
1133 LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
1134 DCHECK(raw_holder->IsJSObject());
1136 FunctionCallbackArguments custom(isolate,
1144 v8::Handle<v8::Value> value = custom.Call(callback);
1145 if (value.IsEmpty()) {
1146 result = heap->undefined_value();
1148 result = *reinterpret_cast<Object**>(*value);
1149 result->VerifyApiCallResultType();
1152 RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
1153 if (!is_construct || result->IsJSObject()) return result;
1156 return *args.receiver();
1160 BUILTIN(HandleApiCall) {
1161 return HandleApiCallHelper<false>(args, isolate);
1165 BUILTIN(HandleApiCallConstruct) {
1166 return HandleApiCallHelper<true>(args, isolate);
1170 // Helper function to handle calls to non-function objects created through the
1171 // API. The object can be called as either a constructor (using new) or just as
1172 // a function (without new).
1173 MUST_USE_RESULT static Object* HandleApiCallAsFunctionOrConstructor(
1175 bool is_construct_call,
1176 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
1177 // Non-functions are never called as constructors. Even if this is an object
1178 // called as a constructor, the delegate call is not a construct call.
1179 DCHECK(!CalledAsConstructor(isolate));
1180 Heap* heap = isolate->heap();
1182 Handle<Object> receiver = args.receiver();
1184 // Get the object called.
1185 JSObject* obj = JSObject::cast(*receiver);
1187 // Get the invocation callback from the function descriptor that was
1188 // used to create the called object.
1189 DCHECK(obj->map()->has_instance_call_handler());
1190 JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
1191 DCHECK(constructor->shared()->IsApiFunction());
1193 constructor->shared()->get_api_func_data()->instance_call_handler();
1194 DCHECK(!handler->IsUndefined());
1195 CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
1196 Object* callback_obj = call_data->callback();
1197 v8::FunctionCallback callback =
1198 v8::ToCData<v8::FunctionCallback>(callback_obj);
1200 // Get the data for the call and perform the callback.
1203 HandleScope scope(isolate);
1204 LOG(isolate, ApiObjectAccess("call non-function", obj));
1206 FunctionCallbackArguments custom(isolate,
1213 v8::Handle<v8::Value> value = custom.Call(callback);
1214 if (value.IsEmpty()) {
1215 result = heap->undefined_value();
1217 result = *reinterpret_cast<Object**>(*value);
1218 result->VerifyApiCallResultType();
1221 // Check for exceptions and return result.
1222 RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate);
1227 // Handle calls to non-function objects created through the API. This delegate
1228 // function is used when the call is a normal function call.
1229 BUILTIN(HandleApiCallAsFunction) {
1230 return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1234 // Handle calls to non-function objects created through the API. This delegate
1235 // function is used when the call is a construct call.
1236 BUILTIN(HandleApiCallAsConstructor) {
1237 return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
1241 static void Generate_LoadIC_Miss(MacroAssembler* masm) {
1242 LoadIC::GenerateMiss(masm);
1246 static void Generate_LoadIC_Normal(MacroAssembler* masm) {
1247 LoadIC::GenerateNormal(masm);
1251 static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
1252 NamedLoadHandlerCompiler::GenerateLoadViaGetterForDeopt(masm);
1256 static void Generate_LoadIC_Slow(MacroAssembler* masm) {
1257 LoadIC::GenerateRuntimeGetProperty(masm);
1261 static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
1262 KeyedLoadIC::GenerateInitialize(masm);
1266 static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
1267 KeyedLoadIC::GenerateRuntimeGetProperty(masm);
1271 static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
1272 KeyedLoadIC::GenerateMiss(masm);
1276 static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
1277 KeyedLoadIC::GenerateGeneric(masm);
1281 static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
1282 KeyedLoadIC::GenerateString(masm);
1286 static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
1287 KeyedLoadIC::GeneratePreMonomorphic(masm);
1291 static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
1292 KeyedLoadIC::GenerateIndexedInterceptor(masm);
1296 static void Generate_KeyedLoadIC_SloppyArguments(MacroAssembler* masm) {
1297 KeyedLoadIC::GenerateSloppyArguments(masm);
1301 static void Generate_StoreIC_Slow(MacroAssembler* masm) {
1302 StoreIC::GenerateSlow(masm);
1306 static void Generate_StoreIC_Miss(MacroAssembler* masm) {
1307 StoreIC::GenerateMiss(masm);
1311 static void Generate_StoreIC_Normal(MacroAssembler* masm) {
1312 StoreIC::GenerateNormal(masm);
1316 static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
1317 NamedStoreHandlerCompiler::GenerateStoreViaSetterForDeopt(masm);
1321 static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
1322 KeyedStoreIC::GenerateGeneric(masm, SLOPPY);
1326 static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
1327 KeyedStoreIC::GenerateGeneric(masm, STRICT);
1331 static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
1332 KeyedStoreIC::GenerateMiss(masm);
1336 static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
1337 KeyedStoreIC::GenerateSlow(masm);
1341 static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
1342 KeyedStoreIC::GenerateInitialize(masm);
1346 static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
1347 KeyedStoreIC::GenerateInitialize(masm);
1351 static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
1352 KeyedStoreIC::GeneratePreMonomorphic(masm);
1356 static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
1357 KeyedStoreIC::GeneratePreMonomorphic(masm);
1361 static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
1362 KeyedStoreIC::GenerateSloppyArguments(masm);
1366 static void Generate_CallICStub_DebugBreak(MacroAssembler* masm) {
1367 DebugCodegen::GenerateCallICStubDebugBreak(masm);
1371 static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
1372 DebugCodegen::GenerateLoadICDebugBreak(masm);
1376 static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
1377 DebugCodegen::GenerateStoreICDebugBreak(masm);
1381 static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
1382 DebugCodegen::GenerateKeyedLoadICDebugBreak(masm);
1386 static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
1387 DebugCodegen::GenerateKeyedStoreICDebugBreak(masm);
1391 static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
1392 DebugCodegen::GenerateCompareNilICDebugBreak(masm);
1396 static void Generate_Return_DebugBreak(MacroAssembler* masm) {
1397 DebugCodegen::GenerateReturnDebugBreak(masm);
1401 static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
1402 DebugCodegen::GenerateCallFunctionStubDebugBreak(masm);
1406 static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
1407 DebugCodegen::GenerateCallConstructStubDebugBreak(masm);
1411 static void Generate_CallConstructStub_Recording_DebugBreak(
1412 MacroAssembler* masm) {
1413 DebugCodegen::GenerateCallConstructStubRecordDebugBreak(masm);
1417 static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
1418 DebugCodegen::GenerateSlotDebugBreak(masm);
1422 static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
1423 DebugCodegen::GeneratePlainReturnLiveEdit(masm);
1427 static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
1428 DebugCodegen::GenerateFrameDropperLiveEdit(masm);
1432 Builtins::Builtins() : initialized_(false) {
1433 memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
1434 memset(names_, 0, sizeof(names_[0]) * builtin_count);
1438 Builtins::~Builtins() {
1442 #define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
1443 Address const Builtins::c_functions_[cfunction_count] = {
1444 BUILTIN_LIST_C(DEF_ENUM_C)
1448 #define DEF_JS_NAME(name, ignore) #name,
1449 #define DEF_JS_ARGC(ignore, argc) argc,
1450 const char* const Builtins::javascript_names_[id_count] = {
1451 BUILTINS_LIST_JS(DEF_JS_NAME)
1454 int const Builtins::javascript_argc_[id_count] = {
1455 BUILTINS_LIST_JS(DEF_JS_ARGC)
1460 struct BuiltinDesc {
1463 const char* s_name; // name is only used for generating log information.
1466 BuiltinExtraArguments extra_args;
1469 #define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
1471 class BuiltinFunctionTable {
1473 BuiltinDesc* functions() {
1474 base::CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
1478 base::OnceType once_;
1479 BuiltinDesc functions_[Builtins::builtin_count + 1];
1481 friend class Builtins;
1484 static BuiltinFunctionTable builtin_function_table =
1485 BUILTIN_FUNCTION_TABLE_INIT;
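// The table is built lazily: the first call to functions() runs
// Builtins::InitBuiltinFunctionTable exactly once via base::CallOnce; later
// callers simply get the already-initialized table.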
1487 // Define array of pointers to generators and C builtin functions.
1488 // We do this in a sort of roundabout way so that we can do the initialization
1489 // within the lexical scope of Builtins:: and within a context where
1490 // Code::Flags names a non-abstract type.
1491 void Builtins::InitBuiltinFunctionTable() {
1492 BuiltinDesc* functions = builtin_function_table.functions_;
1493 functions[builtin_count].generator = NULL;
1494 functions[builtin_count].c_code = NULL;
1495 functions[builtin_count].s_name = NULL;
1496 functions[builtin_count].name = builtin_count;
1497 functions[builtin_count].flags = static_cast<Code::Flags>(0);
1498 functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
1500 #define DEF_FUNCTION_PTR_C(aname, aextra_args) \
1501 functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
1502 functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
1503 functions->s_name = #aname; \
1504 functions->name = c_##aname; \
1505 functions->flags = Code::ComputeFlags(Code::BUILTIN); \
1506 functions->extra_args = aextra_args; \
1509 #define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
1510 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1511 functions->c_code = NULL; \
1512 functions->s_name = #aname; \
1513 functions->name = k##aname; \
1514 functions->flags = Code::ComputeFlags(Code::kind, \
1517 functions->extra_args = NO_EXTRA_ARGUMENTS; \
1520 #define DEF_FUNCTION_PTR_H(aname, kind) \
1521 functions->generator = FUNCTION_ADDR(Generate_##aname); \
1522 functions->c_code = NULL; \
1523 functions->s_name = #aname; \
1524 functions->name = k##aname; \
1525 functions->flags = Code::ComputeHandlerFlags(Code::kind); \
1526 functions->extra_args = NO_EXTRA_ARGUMENTS; \
1529 BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
1530 BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
1531 BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
1532 BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
1534 #undef DEF_FUNCTION_PTR_C
1535 #undef DEF_FUNCTION_PTR_A
1539 void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
1540 DCHECK(!initialized_);
1542 // Create a scope for the handles in the builtins.
1543 HandleScope scope(isolate);
1545 const BuiltinDesc* functions = builtin_function_table.functions();
1547 // For now we generate builtin adaptor code into a stack-allocated
1548 // buffer, before copying it into individual code objects. Be careful
1549 // with alignment; some platforms don't like unaligned code.
1551 // We can generate a lot of debug code on Arm64.
1552 const size_t buffer_size = 32*KB;
1554 const size_t buffer_size = 8*KB;
1556 union { int force_alignment; byte buffer[buffer_size]; } u;
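// The otherwise unused int member only forces the byte buffer to int
// alignment so that the adaptor code is not generated at an unaligned
// address.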
1558 // Traverse the list of builtins and generate an adaptor in a
1559 // separate code object for each one.
1560 for (int i = 0; i < builtin_count; i++) {
1561 if (create_heap_objects) {
1562 MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
1563 // Generate the code/adaptor.
1564 typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
1565 Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
1566 // We pass all arguments to the generator, but it may not use all of
1567 // them. This works because the first arguments are on top of the stack.
1569 DCHECK(!masm.has_frame());
1570 g(&masm, functions[i].name, functions[i].extra_args);
1571 // Move the code into the object heap.
1573 masm.GetCode(&desc);
1574 Code::Flags flags = functions[i].flags;
1576 isolate->factory()->NewCode(desc, flags, masm.CodeObject());
1577 // Log the event and add the code to the builtins array.
1579 CodeCreateEvent(Logger::BUILTIN_TAG, *code, functions[i].s_name));
1580 builtins_[i] = *code;
1581 if (code->kind() == Code::BUILTIN) code->set_builtin_index(i);
1582 #ifdef ENABLE_DISASSEMBLER
1583 if (FLAG_print_builtin_code) {
1584 CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
1585 OFStream os(trace_scope.file());
1586 os << "Builtin: " << functions[i].s_name << "\n";
1587 code->Disassemble(functions[i].s_name, os);
1592 // Deserializing. The values will be filled in during IterateBuiltins.
1593 builtins_[i] = NULL;
1595 names_[i] = functions[i].s_name;
1598 // Mark as initialized.
1599 initialized_ = true;
1603 void Builtins::TearDown() {
1604 initialized_ = false;
1608 void Builtins::IterateBuiltins(ObjectVisitor* v) {
1609 v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
1613 const char* Builtins::Lookup(byte* pc) {
1614 // May be called during initialization (disassembler!).
1616 for (int i = 0; i < builtin_count; i++) {
1617 Code* entry = Code::cast(builtins_[i]);
1618 if (entry->contains(pc)) {
1627 void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
1628 masm->TailCallRuntime(Runtime::kInterrupt, 0, 1);
1632 void Builtins::Generate_StackCheck(MacroAssembler* masm) {
1633 masm->TailCallRuntime(Runtime::kStackGuard, 0, 1);
1637 #define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
1638 Handle<Code> Builtins::name() { \
1639 Code** code_address = \
1640 reinterpret_cast<Code**>(builtin_address(k##name)); \
1641 return Handle<Code>(code_address); \
1643 #define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
1644 Handle<Code> Builtins::name() { \
1645 Code** code_address = \
1646 reinterpret_cast<Code**>(builtin_address(k##name)); \
1647 return Handle<Code>(code_address); \
1649 #define DEFINE_BUILTIN_ACCESSOR_H(name, kind) \
1650 Handle<Code> Builtins::name() { \
1651 Code** code_address = \
1652 reinterpret_cast<Code**>(builtin_address(k##name)); \
1653 return Handle<Code>(code_address); \
1655 BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
1656 BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
1657 BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
1658 BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
1659 #undef DEFINE_BUILTIN_ACCESSOR_C
1660 #undef DEFINE_BUILTIN_ACCESSOR_A
1663 } } // namespace v8::internal