1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 #include "arguments.h"
32 #include "bootstrapper.h"
34 #include "cpu-profiler.h"
37 #include "heap-profiler.h"
38 #include "mark-compact.h"
39 #include "stub-cache.h"
40 #include "vm-state-inl.h"
47 // Arguments object passed to C++ builtins.
// Thin wrapper over Arguments that gives C++ builtins typed, bounds-checked
// access to the JS arguments on the stack. The extra_args template parameter
// selects at compile time whether a trailing "called function" slot is
// appended after the regular arguments.
// NOTE(review): this capture is missing interior lines (closing braces,
// access specifiers) — confirm against the upstream builtins.cc.
48 template <BuiltinExtraArguments extra_args>
49 class BuiltinArguments : public Arguments {
51 BuiltinArguments(int length, Object** arguments)
52 : Arguments(length, arguments) { }
// Bounds-checked element access (debug-only check via ASSERT).
54 Object*& operator[] (int index) {
55 ASSERT(index < length());
56 return Arguments::operator[](index);
// Typed handle access to argument `index`; receiver is at index 0.
59 template <class S> Handle<S> at(int index) {
60 ASSERT(index < length());
61 return Arguments::at<S>(index);
64 Handle<Object> receiver() {
65 return Arguments::at<Object>(0);
// Only valid when extra_args == NEEDS_CALLED_FUNCTION: the called function
// is stored in the last raw argument slot.
68 Handle<JSFunction> called_function() {
69 STATIC_ASSERT(extra_args == NEEDS_CALLED_FUNCTION);
70 return Arguments::at<JSFunction>(Arguments::length() - 1);
73 // Gets the total number of arguments including the receiver (but
74 // excluding extra arguments).
76 STATIC_ASSERT(extra_args == NO_EXTRA_ARGUMENTS);
77 return Arguments::length();
82 // Check we have at least the receiver.
83 ASSERT(Arguments::length() >= 1);
89 // Specialize BuiltinArguments for the called function extra argument.
// Specialization: when a called-function slot is appended, the logical
// argument count excludes that trailing slot.
// NOTE(review): the `template <>` introducer line appears elided here.
92 int BuiltinArguments<NEEDS_CALLED_FUNCTION>::length() const {
93 return Arguments::length() - 1;
// Debug-mode sanity check for the NEEDS_CALLED_FUNCTION layout: at minimum
// the receiver plus the trailing called-function slot must be present.
98 void BuiltinArguments<NEEDS_CALLED_FUNCTION>::Verify() {
99 // Check we have at least the receiver and the called function.
100 ASSERT(Arguments::length() >= 2);
101 // Make sure cast to JSFunction succeeds.
// For every C++ builtin listed in BUILTIN_LIST_C, generate a typedef
// `<Name>ArgumentsType` bound to the builtin's BuiltinExtraArguments spec.
107 #define DEF_ARG_TYPE(name, spec) \
108 typedef BuiltinArguments<spec> name##ArgumentsType;
109 BUILTIN_LIST_C(DEF_ARG_TYPE)
114 // ----------------------------------------------------------------------------
115 // Support macro for defining builtins in C++.
116 // ----------------------------------------------------------------------------
118 // A builtin function is defined by writing:
124 // In the body of the builtin function the arguments can be accessed
125 // through the BuiltinArguments object args.
// BUILTIN(name) expands to the boilerplate for a C++ builtin: a raw entry
// point Builtin_<name>(args_length, args_object, isolate) that packs the raw
// pointers into the generated <name>ArgumentsType and forwards to the
// user-written implementation body. Two variants exist; the debug one
// (presumably guarded by an #ifdef DEBUG elided from this capture) also
// runs args.Verify().
129 #define BUILTIN(name) \
130 MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
131 name##ArgumentsType args, Isolate* isolate); \
132 MUST_USE_RESULT static MaybeObject* Builtin_##name( \
133 int args_length, Object** args_object, Isolate* isolate) { \
134 name##ArgumentsType args(args_length, args_object); \
136 return Builtin_Impl_##name(args, isolate); \
138 MUST_USE_RESULT static MaybeObject* Builtin_Impl_##name( \
139 name##ArgumentsType args, Isolate* isolate)
141 #else // For release mode.
// Release variant: identical shape, no verification, no MUST_USE_RESULT.
143 #define BUILTIN(name) \
144 static MaybeObject* Builtin_impl##name( \
145 name##ArgumentsType args, Isolate* isolate); \
146 static MaybeObject* Builtin_##name( \
147 int args_length, Object** args_object, Isolate* isolate) { \
148 name##ArgumentsType args(args_length, args_object); \
149 return Builtin_impl##name(args, isolate); \
151 static MaybeObject* Builtin_impl##name( \
152 name##ArgumentsType args, Isolate* isolate)
// Determines whether the current builtin frame was entered via a [[Construct]]
// call, by reading the construct marker out of the caller frame directly.
// The slow StackFrameIterator path is computed only as a cross-check
// (ASSERT_EQ below) against the fast fp-based computation.
// NOTE(review): the final `return result;` line appears elided in this
// capture — confirm against upstream.
157 static inline bool CalledAsConstructor(Isolate* isolate) {
158 // Calculate the result using a full stack frame iterator and check
159 // that the state of the stack is as we assume it to be in the
161 StackFrameIterator it(isolate);
162 ASSERT(it.frame()->is_exit());
164 StackFrame* frame = it.frame();
165 bool reference_result = frame->is_construct();
166 Address fp = Isolate::c_entry_fp(isolate->thread_local_top());
167 // Because we know fp points to an exit frame we can use the relevant
168 // part of ExitFrame::ComputeCallerState directly.
169 const int kCallerOffset = ExitFrameConstants::kCallerFPOffset;
170 Address caller_fp = Memory::Address_at(fp + kCallerOffset);
171 // This inlines the part of StackFrame::ComputeType that grabs the
172 // type of the current frame. Note that StackFrame::ComputeType
173 // has been specialized for each architecture so if any one of them
174 // changes this code has to be changed as well.
175 const int kMarkerOffset = StandardFrameConstants::kMarkerOffset;
176 const Smi* kConstructMarker = Smi::FromInt(StackFrame::CONSTRUCT);
177 Object* marker = Memory::Object_at(caller_fp + kMarkerOffset);
178 bool result = (marker == kConstructMarker);
179 ASSERT_EQ(result, reference_result);
185 // ----------------------------------------------------------------------------
189 return isolate->heap()->undefined_value(); // Make compiler happy.
// %EmptyFunction: a builtin that does nothing and returns undefined.
193 BUILTIN(EmptyFunction) {
194 return isolate->heap()->undefined_value();
// memmove-style copy of `len` doubles from src[src_index..] to
// dst[dst_index..]; safe for overlapping ranges (OS::MemMove).
// NOTE(review): the dst_index/src_index/len parameter lines are elided in
// this capture.
198 static void MoveDoubleElements(FixedDoubleArray* dst,
200 FixedDoubleArray* src,
203 if (len == 0) return;
204 OS::MemMove(dst->data_start() + dst_index,
205 src->data_start() + src_index,
// Overwrites dst[from..to) with the_hole. Must not be called on a
// copy-on-write array (asserted), since that storage is shared.
210 static void FillWithHoles(Heap* heap, FixedArray* dst, int from, int to) {
211 ASSERT(dst->map() != heap->fixed_cow_array_map());
212 MemsetPointer(dst->data_start() + from, heap->the_hole_value(), to - from);
// Double-array overload: writes the hole NaN pattern into dst[from..to).
216 static void FillWithHoles(FixedDoubleArray* dst, int from, int to) {
217 for (int i = from; i < to; i++) {
218 dst->set_the_hole(i);
// Removes the first `to_trim` elements of `elms` in place by moving the
// object's start forward in memory: a filler object is written over the
// vacated prefix and the map/length words are rewritten at the new start.
// Avoids copying the whole backing store on e.g. Array.prototype.shift.
// NOTE(review): several interior lines (the to_trim parameter, else-branches,
// the final return) are elided in this capture — confirm upstream.
223 static FixedArrayBase* LeftTrimFixedArray(Heap* heap,
224 FixedArrayBase* elms,
// Element width depends on the backing-store kind.
226 Map* map = elms->map();
228 if (elms->IsFixedArray()) {
229 entry_size = kPointerSize;
231 entry_size = kDoubleSize;
233 ASSERT(elms->map() != heap->fixed_cow_array_map());
234 // For now this trick is only applied to fixed arrays in new and paged space.
235 // In large object space the object's start must coincide with chunk
236 // and thus the trick is just not applicable.
237 ASSERT(!heap->lo_space()->Contains(elms));
// The code below hard-codes the header layout; lock it in at compile time.
239 STATIC_ASSERT(FixedArrayBase::kMapOffset == 0);
240 STATIC_ASSERT(FixedArrayBase::kLengthOffset == kPointerSize);
241 STATIC_ASSERT(FixedArrayBase::kHeaderSize == 2 * kPointerSize);
243 Object** former_start = HeapObject::RawField(elms, 0);
245 const int len = elms->length();
247 if (to_trim * entry_size > FixedArrayBase::kHeaderSize &&
248 elms->IsFixedArray() &&
249 !heap->new_space()->Contains(elms)) {
250 // If we are doing a big trim in old space then we zap the space that was
251 // formerly part of the array so that the GC (aided by the card-based
252 // remembered set) won't find pointers to new-space there.
253 Object** zap = reinterpret_cast<Object**>(elms->address());
254 zap++; // Header of filler must be at least one word so skip that.
255 for (int i = 1; i < to_trim; i++) {
256 *zap++ = Smi::FromInt(0);
259 // Technically in new space this write might be omitted (except for
260 // debug mode which iterates through the heap), but to play safer
262 heap->CreateFillerObjectAt(elms->address(), to_trim * entry_size);
// Rewrite map and (shortened) length at the new object start.
264 int new_start_index = to_trim * (entry_size / kPointerSize);
265 former_start[new_start_index] = map;
266 former_start[new_start_index + 1] = Smi::FromInt(len - to_trim);
268 // Maintain marking consistency for HeapObjectIterator and
269 // IncrementalMarking.
270 int size_delta = to_trim * entry_size;
271 Address new_start = elms->address() + size_delta;
272 heap->marking()->TransferMark(elms->address(), new_start);
273 heap->AdjustLiveBytes(new_start, -size_delta, Heap::FROM_MUTATOR);
275 FixedArrayBase* new_elms =
276 FixedArrayBase::cast(HeapObject::FromAddress(new_start));
// Tell the heap profiler the object "moved" so snapshots stay consistent.
277 HeapProfiler* profiler = heap->isolate()->heap_profiler();
278 if (profiler->is_tracking_object_moves()) {
279 profiler->ObjectMoveEvent(elms->address(),
// Returns true when the Array prototype chain (Array.prototype and
// Object.prototype) holds no elements, so fast-path element operations
// cannot observably skip inherited indexed properties.
287 static bool ArrayPrototypeHasNoElements(Heap* heap,
288 Context* native_context,
289 JSObject* array_proto) {
290 // This method depends on non writability of Object and Array prototype
292 if (array_proto->elements() != heap->empty_fixed_array()) return false;
// Walk one level up: Array.prototype's prototype must be the unmodified
// initial Object.prototype, itself element-free and terminating in null.
294 Object* proto = array_proto->GetPrototype();
295 if (proto == heap->null_value()) return false;
296 array_proto = JSObject::cast(proto);
297 if (array_proto != native_context->initial_object_prototype()) return false;
298 if (array_proto->elements() != heap->empty_fixed_array()) return false;
299 return array_proto->GetPrototype()->IsNull();
303 // Returns empty handle if not applicable.
// Fast-path precondition check for the Array builtins below. Returns the
// (writable, fast-mode) backing store of `receiver` if the builtin may
// operate on it directly, or a null handle to force the generic JS fallback.
// When `args`/`first_added_arg` describe values about to be inserted, also
// transitions the elements kind so those values fit (smi -> double/object).
// NOTE(review): the isolate/args parameter lines and some else-branches are
// elided in this capture.
305 static inline Handle<FixedArrayBase> EnsureJSArrayWithWritableFastElements(
307 Handle<Object> receiver,
309 int first_added_arg) {
// Bail out for non-arrays, observed arrays (Object.observe must see the
// generic path), and non-extensible arrays.
310 if (!receiver->IsJSArray()) return Handle<FixedArrayBase>::null();
311 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
312 if (array->map()->is_observed()) return Handle<FixedArrayBase>::null();
313 if (!array->map()->is_extensible()) return Handle<FixedArrayBase>::null();
314 Handle<FixedArrayBase> elms(array->elements());
315 Heap* heap = isolate->heap();
316 Map* map = elms->map();
317 if (map == heap->fixed_array_map()) {
318 if (args == NULL || array->HasFastObjectElements()) return elms;
319 } else if (map == heap->fixed_cow_array_map()) {
// Copy-on-write storage must be cloned before the builtin mutates it.
320 elms = JSObject::EnsureWritableFastElements(array);
321 if (args == NULL || array->HasFastObjectElements()) return elms;
322 } else if (map == heap->fixed_double_array_map()) {
323 if (args == NULL) return elms;
325 return Handle<FixedArrayBase>::null();
328 // Need to ensure that the arguments passed in args can be contained in
330 int args_length = args->length();
331 if (first_added_arg >= args_length) return handle(array->elements());
333 ElementsKind origin_kind = array->map()->elements_kind();
334 ASSERT(!IsFastObjectElementsKind(origin_kind));
335 ElementsKind target_kind = origin_kind;
// Arguments grow downwards on the stack, hence the reversed pointer math.
336 int arg_count = args->length() - first_added_arg;
337 Object** arguments = args->arguments() - first_added_arg - (arg_count - 1);
338 for (int i = 0; i < arg_count; i++) {
339 Object* arg = arguments[i];
340 if (arg->IsHeapObject()) {
341 if (arg->IsHeapNumber()) {
342 target_kind = FAST_DOUBLE_ELEMENTS;
344 target_kind = FAST_ELEMENTS;
349 if (target_kind != origin_kind) {
350 JSObject::TransitionElementsKind(array, target_kind);
351 return handle(array->elements());
357 // TODO(ishell): Handlify when all Array* builtins are handlified.
// True if elements of `receiver` may be relocated/renumbered without
// consulting the prototype chain: requires --clever-optimizations, the
// receiver's prototype to be the unmodified Array.prototype, and that
// prototype chain to contain no elements.
// NOTE(review): the receiver parameter line is elided in this capture.
358 static inline bool IsJSArrayFastElementMovingAllowed(Heap* heap,
360 if (!FLAG_clever_optimizations) return false;
361 Context* native_context = heap->isolate()->context()->native_context();
362 JSObject* array_proto =
363 JSObject::cast(native_context->array_function()->prototype());
364 return receiver->GetPrototype() == array_proto &&
365 ArrayPrototypeHasNoElements(heap, native_context, array_proto);
// Generic fallback: re-dispatches the builtin to its JavaScript
// implementation (looked up by name on the builtins object), forwarding all
// arguments except the receiver. Used whenever a fast-path precondition
// fails. NOTE(review): the name parameter line and the Execution::Call
// argument lines are partially elided in this capture.
369 MUST_USE_RESULT static MaybeObject* CallJsBuiltin(
372 BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
373 HandleScope handleScope(isolate);
375 Handle<Object> js_builtin =
376 GetProperty(Handle<JSObject>(isolate->native_context()->builtins()),
378 Handle<JSFunction> function = Handle<JSFunction>::cast(js_builtin);
// Copy args (minus receiver at index 0) into a handle vector for the call.
379 int argc = args.length() - 1;
380 ScopedVector<Handle<Object> > argv(argc);
381 for (int i = 0; i < argc; ++i) {
382 argv[i] = args.at<Object>(i + 1);
384 bool pending_exception;
385 Handle<Object> result = Execution::Call(isolate,
391 if (pending_exception) return Failure::Exception();
// Array.prototype.push fast path (the BUILTIN(ArrayPush) header line is
// elided in this capture). Two symmetric branches: smi/object elements
// (FixedArray) and double elements (FixedDoubleArray). Falls back to the JS
// builtin when the backing store is not writable fast elements.
397 HandleScope scope(isolate);
398 Handle<Object> receiver = args.receiver();
399 Handle<FixedArrayBase> elms_obj =
400 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 1);
401 if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPush", args);
403 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
404 ASSERT(!array->map()->is_observed());
406 ElementsKind kind = array->GetElementsKind();
408 if (IsFastSmiOrObjectElementsKind(kind)) {
409 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
411 int len = Smi::cast(array->length())->value();
412 int to_add = args.length() - 1;
// Pushing nothing: length is unchanged (early-return guard presumably
// elided here).
414 return Smi::FromInt(len);
416 // Currently fixed arrays cannot grow too big, so
417 // we should never hit this case.
418 ASSERT(to_add <= (Smi::kMaxValue - len));
420 int new_length = len + to_add;
422 if (new_length > elms->length()) {
423 // New backing storage is needed.
// Growth policy: 1.5x + 16, matching the other Array builtins below.
424 int capacity = new_length + (new_length >> 1) + 16;
425 Handle<FixedArray> new_elms =
426 isolate->factory()->NewUninitializedFixedArray(capacity);
428 ElementsAccessor* accessor = array->GetElementsAccessor();
429 accessor->CopyElements(
430 Handle<JSObject>::null(), 0, kind, new_elms, 0,
431 ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
436 // Add the provided values.
437 DisallowHeapAllocation no_gc;
438 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
439 for (int index = 0; index < to_add; index++) {
440 elms->set(index + len, args[index + 1], mode);
443 if (*elms != array->elements()) {
444 array->set_elements(*elms);
448 array->set_length(Smi::FromInt(new_length));
449 return Smi::FromInt(new_length);
// Double-elements branch (the else/ASSERT lines appear elided).
451 int len = Smi::cast(array->length())->value();
452 int elms_len = elms_obj->length();
454 int to_add = args.length() - 1;
456 return Smi::FromInt(len);
458 // Currently fixed arrays cannot grow too big, so
459 // we should never hit this case.
460 ASSERT(to_add <= (Smi::kMaxValue - len));
462 int new_length = len + to_add;
464 Handle<FixedDoubleArray> new_elms;
466 if (new_length > elms_len) {
467 // New backing storage is needed.
468 int capacity = new_length + (new_length >> 1) + 16;
469 new_elms = isolate->factory()->NewFixedDoubleArray(capacity);
471 ElementsAccessor* accessor = array->GetElementsAccessor();
472 accessor->CopyElements(
473 Handle<JSObject>::null(), 0, kind, new_elms, 0,
474 ElementsAccessor::kCopyToEndAndInitializeToHole, elms_obj);
477 // to_add is > 0 and new_length <= elms_len, so elms_obj cannot be the
478 // empty_fixed_array.
479 new_elms = Handle<FixedDoubleArray>::cast(elms_obj);
482 // Add the provided values.
483 DisallowHeapAllocation no_gc;
485 for (index = 0; index < to_add; index++) {
486 Object* arg = args[index + 1];
// Values were vetted by EnsureJSArrayWithWritableFastElements, so each arg
// is a number here and can be stored unboxed.
487 new_elms->set(index + len, arg->Number());
490 if (*new_elms != array->elements()) {
491 array->set_elements(*new_elms);
495 array->set_length(Smi::FromInt(new_length));
496 return Smi::FromInt(new_length);
501 // TODO(ishell): Temporary wrapper until handlified.
// Handle-taking shim over ElementsAccessor::HasElement (which still takes
// raw pointers); a null backing_store handle maps to NULL.
// NOTE(review): the `key` parameter line is elided in this capture.
502 static bool ElementsAccessorHasElementWrapper(
503 ElementsAccessor* accessor,
504 Handle<Object> receiver,
505 Handle<JSObject> holder,
507 Handle<FixedArrayBase> backing_store = Handle<FixedArrayBase>::null()) {
508 return accessor->HasElement(*receiver, *holder, key,
509 backing_store.is_null() ? NULL : *backing_store);
// Array.prototype.pop fast path (the BUILTIN(ArrayPop) header line is elided
// in this capture). Reads the last element (falling back to the prototype
// chain for holes), then shrinks the array length by one.
514 HandleScope scope(isolate);
515 Handle<Object> receiver = args.receiver();
516 Handle<FixedArrayBase> elms_obj =
517 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
518 if (elms_obj.is_null()) return CallJsBuiltin(isolate, "ArrayPop", args);
520 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
521 ASSERT(!array->map()->is_observed());
523 int len = Smi::cast(array->length())->value();
524 if (len == 0) return isolate->heap()->undefined_value();
526 ElementsAccessor* accessor = array->GetElementsAccessor();
527 int new_length = len - 1;
528 Handle<Object> element;
529 if (ElementsAccessorHasElementWrapper(
530 accessor, array, array, new_length, elms_obj)) {
531 element = accessor->Get(
532 array, array, new_length, elms_obj);
// Hole at the last index: per spec the value is looked up on the prototype.
534 Handle<Object> proto(array->GetPrototype(), isolate);
535 element = Object::GetElement(isolate, proto, len - 1);
537 RETURN_IF_EMPTY_HANDLE(isolate, element);
538 RETURN_IF_EMPTY_HANDLE(isolate,
540 array, handle(Smi::FromInt(new_length), isolate)));
// Array.prototype.shift fast path: removes and returns the first element.
// Either left-trims the backing store in place (cheap) or memmoves the
// elements down by one when the object start cannot be moved.
545 BUILTIN(ArrayShift) {
546 HandleScope scope(isolate);
547 Heap* heap = isolate->heap();
548 Handle<Object> receiver = args.receiver();
549 Handle<FixedArrayBase> elms_obj =
550 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
551 if (elms_obj.is_null() ||
552 !IsJSArrayFastElementMovingAllowed(heap,
553 *Handle<JSArray>::cast(receiver))) {
554 return CallJsBuiltin(isolate, "ArrayShift", args);
556 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
557 ASSERT(!array->map()->is_observed());
559 int len = Smi::cast(array->length())->value();
560 if (len == 0) return heap->undefined_value();
// Fetch element 0 before mutating the store; holes become undefined.
563 ElementsAccessor* accessor = array->GetElementsAccessor();
564 Handle<Object> first = accessor->Get(receiver, array, 0, elms_obj);
565 RETURN_IF_EMPTY_HANDLE(isolate, first);
566 if (first->IsTheHole()) {
567 first = isolate->factory()->undefined_value();
// NOTE(review): the condition here reads `!CanMoveObjectStart` yet takes the
// LeftTrim branch — upstream has the branches the other way around; lines
// appear elided/reordered in this capture, confirm against builtins.cc.
570 if (!heap->CanMoveObjectStart(*elms_obj)) {
571 array->set_elements(LeftTrimFixedArray(heap, *elms_obj, 1));
573 // Shift the elements.
574 if (elms_obj->IsFixedArray()) {
575 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
576 DisallowHeapAllocation no_gc;
577 heap->MoveElements(*elms, 0, 1, len - 1);
578 elms->set(len - 1, heap->the_hole_value());
580 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
581 MoveDoubleElements(*elms, 0, *elms, 1, len - 1);
582 elms->set_the_hole(len - 1);
587 array->set_length(Smi::FromInt(len - 1));
// Array.prototype.unshift fast path: inserts the arguments at the front.
// Only smi/object element kinds are handled (doubles fall back to JS).
// Either grows into a new backing store (copying the old elements shifted by
// to_add) or memmoves the existing elements up in place.
593 BUILTIN(ArrayUnshift) {
594 HandleScope scope(isolate);
595 Heap* heap = isolate->heap();
596 Handle<Object> receiver = args.receiver();
597 Handle<FixedArrayBase> elms_obj =
598 EnsureJSArrayWithWritableFastElements(isolate, receiver, NULL, 0);
599 if (elms_obj.is_null() ||
600 !IsJSArrayFastElementMovingAllowed(heap,
601 *Handle<JSArray>::cast(receiver))) {
602 return CallJsBuiltin(isolate, "ArrayUnshift", args);
604 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
605 ASSERT(!array->map()->is_observed());
606 if (!array->HasFastSmiOrObjectElements()) {
607 return CallJsBuiltin(isolate, "ArrayUnshift", args);
609 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
611 int len = Smi::cast(array->length())->value();
612 int to_add = args.length() - 1;
613 int new_length = len + to_add;
614 // Currently fixed arrays cannot grow too big, so
615 // we should never hit this case.
616 ASSERT(to_add <= (Smi::kMaxValue - len));
// May transition the elements kind so the new values are representable.
618 JSObject::EnsureCanContainElements(array, &args, 1, to_add,
619 DONT_ALLOW_DOUBLE_ELEMENTS);
621 if (new_length > elms->length()) {
622 // New backing storage is needed.
623 int capacity = new_length + (new_length >> 1) + 16;
624 Handle<FixedArray> new_elms =
625 isolate->factory()->NewUninitializedFixedArray(capacity);
627 ElementsKind kind = array->GetElementsKind();
628 ElementsAccessor* accessor = array->GetElementsAccessor();
// Copy the old elements starting at offset to_add, leaving a gap in front.
629 accessor->CopyElements(
630 Handle<JSObject>::null(), 0, kind, new_elms, to_add,
631 ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
// NOTE(review): upstream assigns elms = new_elms before this set_elements;
// the line appears elided in this capture.
634 array->set_elements(*elms);
636 DisallowHeapAllocation no_gc;
637 heap->MoveElements(*elms, to_add, 0, len);
640 // Add the provided values.
641 DisallowHeapAllocation no_gc;
642 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
643 for (int i = 0; i < to_add; i++) {
644 elms->set(i, args[i + 1], mode);
648 array->set_length(Smi::FromInt(new_length));
649 return Smi::FromInt(new_length);
// Array.prototype.slice fast path. Handles two receivers: fast-element
// JSArrays, and (as a common idiom) sloppy-mode arguments objects with fast
// elements. Computes the [k, final) range per ES3 15.4.4.10 and bulk-copies
// it into a freshly allocated result array.
653 BUILTIN(ArraySlice) {
654 Heap* heap = isolate->heap();
655 Object* receiver = *args.receiver();
656 FixedArrayBase* elms;
658 if (receiver->IsJSArray()) {
659 JSArray* array = JSArray::cast(receiver);
660 if (!IsJSArrayFastElementMovingAllowed(heap, array)) {
661 return CallJsBuiltin(isolate, "ArraySlice", args);
664 if (array->HasFastElements()) {
665 elms = array->elements();
667 return CallJsBuiltin(isolate, "ArraySlice", args);
670 len = Smi::cast(array->length())->value();
672 // Array.slice(arguments, ...) is quite a common idiom (notably more
673 // than 50% of invocations in Web apps). Treat it in C++ as well.
// Recognize the arguments object by its map (the sloppy-arguments
// boilerplate map from the native context).
674 Map* arguments_map = isolate->context()->native_context()->
675 sloppy_arguments_boilerplate()->map();
677 bool is_arguments_object_with_fast_elements =
678 receiver->IsJSObject() &&
679 JSObject::cast(receiver)->map() == arguments_map;
680 if (!is_arguments_object_with_fast_elements) {
681 return CallJsBuiltin(isolate, "ArraySlice", args);
683 JSObject* object = JSObject::cast(receiver);
685 if (object->HasFastElements()) {
686 elms = object->elements();
688 return CallJsBuiltin(isolate, "ArraySlice", args);
// The arguments object's length is an in-object property, not the backing
// store length; it must be a smi and must not exceed the store.
690 Object* len_obj = object->InObjectPropertyAt(Heap::kArgumentsLengthIndex);
691 if (!len_obj->IsSmi()) {
692 return CallJsBuiltin(isolate, "ArraySlice", args);
694 len = Smi::cast(len_obj)->value();
695 if (len > elms->length()) {
696 return CallJsBuiltin(isolate, "ArraySlice", args);
700 JSObject* object = JSObject::cast(receiver);
703 int n_arguments = args.length() - 1;
705 // Note carefully choosen defaults---if argument is missing,
706 // it's undefined which gets converted to 0 for relative_start
707 // and to len for relative_end.
708 int relative_start = 0;
709 int relative_end = len;
710 if (n_arguments > 0) {
711 Object* arg1 = args[1];
// Accept smis directly, heap numbers within int range, and undefined;
// anything else (e.g. objects needing ToInteger) goes to the JS fallback.
713 relative_start = Smi::cast(arg1)->value();
714 } else if (arg1->IsHeapNumber()) {
715 double start = HeapNumber::cast(arg1)->value();
716 if (start < kMinInt || start > kMaxInt) {
717 return CallJsBuiltin(isolate, "ArraySlice", args);
719 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
720 } else if (!arg1->IsUndefined()) {
721 return CallJsBuiltin(isolate, "ArraySlice", args);
723 if (n_arguments > 1) {
724 Object* arg2 = args[2];
726 relative_end = Smi::cast(arg2)->value();
727 } else if (arg2->IsHeapNumber()) {
728 double end = HeapNumber::cast(arg2)->value();
729 if (end < kMinInt || end > kMaxInt) {
730 return CallJsBuiltin(isolate, "ArraySlice", args);
732 relative_end = std::isnan(end) ? 0 : static_cast<int>(end);
733 } else if (!arg2->IsUndefined()) {
734 return CallJsBuiltin(isolate, "ArraySlice", args);
739 // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 6.
740 int k = (relative_start < 0) ? Max(len + relative_start, 0)
741 : Min(relative_start, len);
743 // ECMAScript 232, 3rd Edition, Section 15.4.4.10, step 8.
744 int final = (relative_end < 0) ? Max(len + relative_end, 0)
745 : Min(relative_end, len);
747 // Calculate the length of result array.
748 int result_len = Max(final - k, 0);
750 ElementsKind kind = object->GetElementsKind();
751 if (IsHoleyElementsKind(kind)) {
// Sliced range contains no holes: result can use the packed kind.
// (A hole in range presumably bails to the JS builtin; lines elided.)
753 ElementsAccessor* accessor = ElementsAccessor::ForKind(kind);
754 for (int i = k; i < final; i++) {
755 if (!accessor->HasElement(object, object, i, elms)) {
761 kind = GetPackedElementsKind(kind);
762 } else if (!receiver->IsJSArray()) {
763 return CallJsBuiltin(isolate, "ArraySlice", args);
767 JSArray* result_array;
768 MaybeObject* maybe_array = heap->AllocateJSArrayAndStorage(kind,
772 DisallowHeapAllocation no_gc;
773 if (result_len == 0) return maybe_array;
774 if (!maybe_array->To(&result_array)) return maybe_array;
776 ElementsAccessor* accessor = object->GetElementsAccessor();
777 MaybeObject* maybe_failure = accessor->CopyElements(
778 NULL, k, kind, result_array->elements(), 0, result_len, elms);
779 ASSERT(!maybe_failure->IsFailure());
// Array.prototype.splice fast path. Computes actual_start/actual_delete_count
// per spec (with the SpiderMonkey-compatible single-argument behavior),
// copies the deleted range into a new result array, then closes or opens the
// gap in the receiver's backing store, choosing between left-trimming,
// in-place memmove, and reallocation depending on sizes and heap placement.
786 BUILTIN(ArraySplice) {
787 HandleScope scope(isolate);
788 Heap* heap = isolate->heap();
789 Handle<Object> receiver = args.receiver();
790 Handle<FixedArrayBase> elms_obj =
791 EnsureJSArrayWithWritableFastElements(isolate, receiver, &args, 3);
792 if (elms_obj.is_null() ||
793 !IsJSArrayFastElementMovingAllowed(heap,
794 *Handle<JSArray>::cast(receiver))) {
795 return CallJsBuiltin(isolate, "ArraySplice", args);
797 Handle<JSArray> array = Handle<JSArray>::cast(receiver);
798 ASSERT(!array->map()->is_observed());
800 int len = Smi::cast(array->length())->value();
802 int n_arguments = args.length() - 1;
// Same smi/heap-number/undefined acceptance rules as ArraySlice above.
804 int relative_start = 0;
805 if (n_arguments > 0) {
806 Handle<Object> arg1 = args.at<Object>(1);
808 relative_start = Handle<Smi>::cast(arg1)->value();
809 } else if (arg1->IsHeapNumber()) {
810 double start = Handle<HeapNumber>::cast(arg1)->value();
811 if (start < kMinInt || start > kMaxInt) {
812 return CallJsBuiltin(isolate, "ArraySplice", args);
814 relative_start = std::isnan(start) ? 0 : static_cast<int>(start);
815 } else if (!arg1->IsUndefined()) {
816 return CallJsBuiltin(isolate, "ArraySplice", args);
819 int actual_start = (relative_start < 0) ? Max(len + relative_start, 0)
820 : Min(relative_start, len);
822 // SpiderMonkey, TraceMonkey and JSC treat the case where no delete count is
823 // given as a request to delete all the elements from the start.
824 // And it differs from the case of undefined delete count.
825 // This does not follow ECMA-262, but we do the same for
827 int actual_delete_count;
828 if (n_arguments == 1) {
829 ASSERT(len - actual_start >= 0);
830 actual_delete_count = len - actual_start;
832 int value = 0; // ToInteger(undefined) == 0
833 if (n_arguments > 1) {
834 Object* arg2 = args[2];
836 value = Smi::cast(arg2)->value();
838 return CallJsBuiltin(isolate, "ArraySplice", args);
841 actual_delete_count = Min(Max(value, 0), len - actual_start);
844 ElementsKind elements_kind = array->GetElementsKind();
846 int item_count = (n_arguments > 1) ? (n_arguments - 2) : 0;
847 int new_length = len - actual_delete_count + item_count;
849 // For double mode we do not support changing the length.
850 if (new_length > len && IsFastDoubleElementsKind(elements_kind)) {
851 return CallJsBuiltin(isolate, "ArraySplice", args);
// Everything removed: hand the whole old store to the result array and
// reset the receiver to the empty array.
854 if (new_length == 0) {
855 Handle<JSArray> result = isolate->factory()->NewJSArrayWithElements(
856 elms_obj, elements_kind, actual_delete_count);
857 array->set_elements(heap->empty_fixed_array());
858 array->set_length(Smi::FromInt(0));
862 Handle<JSArray> result_array =
863 isolate->factory()->NewJSArray(elements_kind,
865 actual_delete_count);
867 if (actual_delete_count > 0) {
868 DisallowHeapAllocation no_gc;
869 ElementsAccessor* accessor = array->GetElementsAccessor();
870 accessor->CopyElements(
871 Handle<JSObject>::null(), actual_start, elements_kind,
872 handle(result_array->elements()), 0, actual_delete_count, elms_obj);
875 bool elms_changed = false;
876 if (item_count < actual_delete_count) {
// Shrinking: choose the direction that moves fewer elements — trim the
// front (move the prefix up, then drop it) vs. pull the tail left.
878 const bool trim_array = !heap->lo_space()->Contains(*elms_obj) &&
879 ((actual_start + item_count) <
880 (len - actual_delete_count - actual_start));
882 const int delta = actual_delete_count - item_count;
884 if (elms_obj->IsFixedDoubleArray()) {
885 Handle<FixedDoubleArray> elms =
886 Handle<FixedDoubleArray>::cast(elms_obj);
887 MoveDoubleElements(*elms, delta, *elms, 0, actual_start);
889 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
890 DisallowHeapAllocation no_gc;
891 heap->MoveElements(*elms, delta, 0, actual_start);
894 if (heap->CanMoveObjectStart(*elms_obj)) {
895 // On the fast path we move the start of the object in memory.
896 elms_obj = handle(LeftTrimFixedArray(heap, *elms_obj, delta));
898 // This is the slow path. We are going to move the elements to the left
899 // by copying them. For trimmed values we store the hole.
900 if (elms_obj->IsFixedDoubleArray()) {
901 Handle<FixedDoubleArray> elms =
902 Handle<FixedDoubleArray>::cast(elms_obj);
903 MoveDoubleElements(*elms, 0, *elms, delta, len - delta);
904 FillWithHoles(*elms, len - delta, len);
906 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
907 DisallowHeapAllocation no_gc;
908 heap->MoveElements(*elms, 0, delta, len - delta);
909 FillWithHoles(heap, *elms, len - delta, len);
// Non-trim shrink: slide the tail left over the gap, hole out the end.
914 if (elms_obj->IsFixedDoubleArray()) {
915 Handle<FixedDoubleArray> elms =
916 Handle<FixedDoubleArray>::cast(elms_obj);
917 MoveDoubleElements(*elms, actual_start + item_count,
918 *elms, actual_start + actual_delete_count,
919 (len - actual_delete_count - actual_start));
920 FillWithHoles(*elms, new_length, len);
922 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
923 DisallowHeapAllocation no_gc;
924 heap->MoveElements(*elms, actual_start + item_count,
925 actual_start + actual_delete_count,
926 (len - actual_delete_count - actual_start));
927 FillWithHoles(heap, *elms, new_length, len);
930 } else if (item_count > actual_delete_count) {
// Growing: only smi/object arrays reach here (doubles bailed above).
931 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
932 // Currently fixed arrays cannot grow too big, so
933 // we should never hit this case.
934 ASSERT((item_count - actual_delete_count) <= (Smi::kMaxValue - len));
936 // Check if array need to grow.
937 if (new_length > elms->length()) {
938 // New backing storage is needed.
939 int capacity = new_length + (new_length >> 1) + 16;
940 Handle<FixedArray> new_elms =
941 isolate->factory()->NewUninitializedFixedArray(capacity);
943 DisallowHeapAllocation no_gc;
945 ElementsKind kind = array->GetElementsKind();
946 ElementsAccessor* accessor = array->GetElementsAccessor();
947 if (actual_start > 0) {
948 // Copy the part before actual_start as is.
949 accessor->CopyElements(
950 Handle<JSObject>::null(), 0, kind, new_elms, 0, actual_start, elms);
// Copy the tail, shifted right to leave room for the inserted items.
952 accessor->CopyElements(
953 Handle<JSObject>::null(), actual_start + actual_delete_count, kind,
954 new_elms, actual_start + item_count,
955 ElementsAccessor::kCopyToEndAndInitializeToHole, elms);
// In-place grow path: slide the tail right within the existing store.
960 DisallowHeapAllocation no_gc;
961 heap->MoveElements(*elms, actual_start + item_count,
962 actual_start + actual_delete_count,
963 (len - actual_delete_count - actual_start));
// Write the inserted items (splice args start at index 3 in `args`).
967 if (IsFastDoubleElementsKind(elements_kind)) {
968 Handle<FixedDoubleArray> elms = Handle<FixedDoubleArray>::cast(elms_obj);
969 for (int k = actual_start; k < actual_start + item_count; k++) {
970 Object* arg = args[3 + k - actual_start];
972 elms->set(k, Smi::cast(arg)->value());
974 elms->set(k, HeapNumber::cast(arg)->value());
978 Handle<FixedArray> elms = Handle<FixedArray>::cast(elms_obj);
979 DisallowHeapAllocation no_gc;
980 WriteBarrierMode mode = elms->GetWriteBarrierMode(no_gc);
981 for (int k = actual_start; k < actual_start + item_count; k++) {
982 elms->set(k, args[3 + k - actual_start], mode);
987 array->set_elements(*elms_obj);
990 array->set_length(Smi::FromInt(new_length));
992 return *result_array;
// Array.prototype.concat fast path. Requires every argument to be a fast-
// element JSArray with the unmodified Array.prototype. First pass validates
// the arguments, accumulates the total length, and computes the most general
// elements kind needed; second pass bulk-copies each argument's elements
// into the preallocated result.
996 BUILTIN(ArrayConcat) {
997 Heap* heap = isolate->heap();
998 Context* native_context = isolate->context()->native_context();
999 JSObject* array_proto =
1000 JSObject::cast(native_context->array_function()->prototype());
1001 if (!ArrayPrototypeHasNoElements(heap, native_context, array_proto)) {
1002 return CallJsBuiltin(isolate, "ArrayConcat", args);
1005 // Iterate through all the arguments performing checks
1006 // and calculating total length.
1007 int n_arguments = args.length();
1009 ElementsKind elements_kind = GetInitialFastElementsKind();
1010 bool has_double = false;
1011 bool is_holey = false;
1012 for (int i = 0; i < n_arguments; i++) {
1013 Object* arg = args[i];
1014 if (!arg->IsJSArray() ||
1015 !JSArray::cast(arg)->HasFastElements() ||
1016 JSArray::cast(arg)->GetPrototype() != array_proto) {
1017 return CallJsBuiltin(isolate, "ArrayConcat", args);
1019 int len = Smi::cast(JSArray::cast(arg)->length())->value();
1021 // We shouldn't overflow when adding another len.
1022 const int kHalfOfMaxInt = 1 << (kBitsPerInt - 2);
1023 STATIC_ASSERT(FixedArray::kMaxLength < kHalfOfMaxInt);
1026 ASSERT(result_len >= 0);
1028 if (result_len > FixedDoubleArray::kMaxLength) {
1029 return CallJsBuiltin(isolate, "ArrayConcat", args);
// Widen the result kind toward the most general argument kind seen.
1032 ElementsKind arg_kind = JSArray::cast(arg)->map()->elements_kind();
1033 has_double = has_double || IsFastDoubleElementsKind(arg_kind);
1034 is_holey = is_holey || IsFastHoleyElementsKind(arg_kind);
1035 if (IsMoreGeneralElementsKindTransition(elements_kind, arg_kind)) {
1036 elements_kind = arg_kind;
1040 if (is_holey) elements_kind = GetHoleyElementsKind(elements_kind);
1042 // If a double array is concatted into a fast elements array, the fast
1043 // elements array needs to be initialized to contain proper holes, since
1044 // boxing doubles may cause incremental marking.
1045 ArrayStorageAllocationMode mode =
1046 has_double && IsFastObjectElementsKind(elements_kind)
1047 ? INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE : DONT_INITIALIZE_ARRAY_ELEMENTS;
1048 JSArray* result_array;
1050 MaybeObject* maybe_array =
1051 heap->AllocateJSArrayAndStorage(elements_kind,
1055 if (!maybe_array->To(&result_array)) return maybe_array;
1056 if (result_len == 0) return result_array;
// Second pass: append each argument's elements at running offset j.
1059 FixedArrayBase* storage = result_array->elements();
1060 ElementsAccessor* accessor = ElementsAccessor::ForKind(elements_kind);
1061 for (int i = 0; i < n_arguments; i++) {
1062 JSArray* array = JSArray::cast(args[i]);
1063 int len = Smi::cast(array->length())->value();
1064 ElementsKind from_kind = array->GetElementsKind();
1066 MaybeObject* maybe_failure =
1067 accessor->CopyElements(array, 0, from_kind, storage, j, len);
1068 if (maybe_failure->IsFailure()) return maybe_failure;
1073 ASSERT(j == result_len);
1075 return result_array;
1079 // -----------------------------------------------------------------------------
1080 // Strict mode poison pills
// Unconditionally throws a TypeError. Installed where strict-mode code must
// not be able to read a poisoned property (see "strict_poison_pill" message).
BUILTIN(StrictModePoisonPill) {
HandleScope scope(isolate);
return isolate->Throw(*isolate->factory()->NewTypeError(
"strict_poison_pill", HandleVector<Object>(NULL, 0)));
1090 // -----------------------------------------------------------------------------
1094 // Searches the hidden prototype chain of the given object for the first
1095 // object that is an instance of the given type. If no such object can
1096 // be found then Heap::null_value() is returned.
static inline Object* FindHidden(Heap* heap,
FunctionTemplateInfo* type) {
// Direct hit: the object itself was created from (a template derived from)
// the requested FunctionTemplateInfo.
if (type->IsTemplateFor(object)) return object;
Object* proto = object->GetPrototype(heap->isolate());
// Only hidden prototypes are searched; a regular prototype ends the walk.
if (proto->IsJSObject() &&
JSObject::cast(proto)->map()->is_hidden_prototype()) {
return FindHidden(heap, proto, type);
// Nothing in the hidden chain matched.
return heap->null_value();
1110 // Returns the holder JSObject if the function can legally be called
1111 // with this receiver. Returns Heap::null_value() if the call is
1112 // illegal. Any arguments that don't fit the expected type is
1113 // overwritten with undefined. Note that holder and the arguments are
1114 // implicitly rewritten with the first object in the hidden prototype
1115 // chain that actually has the expected type.
static inline Object* TypeCheck(Heap* heap,
FunctionTemplateInfo* info) {
// argv[0] is the receiver; arguments are addressed with negative offsets
// below (argv[-1 - i] is argument i).
Object* recv = argv[0];
// API calls are only supported with JSObject receivers.
if (!recv->IsJSObject()) return heap->null_value();
Object* sig_obj = info->signature();
// No signature means no receiver/argument constraints at all.
if (sig_obj->IsUndefined()) return recv;
SignatureInfo* sig = SignatureInfo::cast(sig_obj);
// If necessary, check the receiver
Object* recv_type = sig->receiver();
Object* holder = recv;
if (!recv_type->IsUndefined()) {
// Rewrite holder to the first object in the hidden-prototype chain that
// matches the expected receiver template; null_value() means "illegal".
holder = FindHidden(heap, holder, FunctionTemplateInfo::cast(recv_type));
if (holder == heap->null_value()) return heap->null_value();
Object* args_obj = sig->args();
// If there is no argument signature we're done
if (args_obj->IsUndefined()) return holder;
FixedArray* args = FixedArray::cast(args_obj);
int length = args->length();
// Never check more arguments than were actually passed.
if (argc <= length) length = argc - 1;
for (int i = 0; i < length; i++) {
Object* argtype = args->get(i);
if (argtype->IsUndefined()) continue;
Object** arg = &argv[-1 - i];
Object* current = *arg;
// Arguments of the wrong type are replaced with undefined rather than
// failing the whole call.
current = FindHidden(heap, current, FunctionTemplateInfo::cast(argtype));
if (current == heap->null_value()) current = heap->undefined_value();
// Common implementation for calling an API (FunctionTemplate-backed)
// function, parameterized on whether this is a construct call. Returns the
// callback's result for normal calls; for construct calls, returns the
// callback result only if it is a JSObject, otherwise the receiver.
template <bool is_construct>
MUST_USE_RESULT static MaybeObject* HandleApiCallHelper(
BuiltinArguments<NEEDS_CALLED_FUNCTION> args, Isolate* isolate) {
ASSERT(is_construct == CalledAsConstructor(isolate));
Heap* heap = isolate->heap();
HandleScope scope(isolate);
Handle<JSFunction> function = args.called_function();
ASSERT(function->shared()->IsApiFunction());
FunctionTemplateInfo* fun_data = function->shared()->get_api_func_data();
Handle<FunctionTemplateInfo> desc(fun_data, isolate);
// Lazily finish configuring the instance from its template; this can run
// user-visible code and therefore throw.
bool pending_exception = false;
isolate->factory()->ConfigureInstance(
desc, Handle<JSObject>::cast(args.receiver()), &pending_exception);
ASSERT(isolate->has_pending_exception() == pending_exception);
if (pending_exception) return Failure::Exception();
// Sloppy-mode, non-native functions see the global receiver instead of
// undefined (standard sloppy receiver patching).
SharedFunctionInfo* shared = function->shared();
if (shared->strict_mode() == SLOPPY && !shared->native()) {
Object* recv = args[0];
ASSERT(!recv->IsNull());
if (recv->IsUndefined()) {
args[0] = function->context()->global_object()->global_receiver();
// Validate receiver/arguments against the template's signature; a null
// holder means the call is illegal with this receiver.
Object* raw_holder = TypeCheck(heap, args.length(), &args[0], fun_data);
if (raw_holder->IsNull()) {
// This function cannot be called with the given receiver. Abort!
Handle<Object> obj =
isolate->factory()->NewTypeError(
"illegal_invocation", HandleVector(&function, 1));
return isolate->Throw(*obj);
// Invoke the C++ callback registered on the template, if any.
Object* raw_call_data = fun_data->call_code();
if (!raw_call_data->IsUndefined()) {
CallHandlerInfo* call_data = CallHandlerInfo::cast(raw_call_data);
Object* callback_obj = call_data->callback();
v8::FunctionCallback callback =
v8::ToCData<v8::FunctionCallback>(callback_obj);
Object* data_obj = call_data->data();
LOG(isolate, ApiObjectAccess("call", JSObject::cast(*args.receiver())));
ASSERT(raw_holder->IsJSObject());
FunctionCallbackArguments custom(isolate,
v8::Handle<v8::Value> value = custom.Call(callback);
// An empty handle from the callback means "no explicit return value".
if (value.IsEmpty()) {
result = heap->undefined_value();
result = *reinterpret_cast<Object**>(*value);
result->VerifyApiCallResultType();
// The callback may have scheduled an exception through the API.
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
if (!is_construct || result->IsJSObject()) return result;
// Construct call whose callback did not return an object: the receiver
// (the newly created object) is the result.
return *args.receiver();
// Entry point for normal (non-construct) calls to API functions.
BUILTIN(HandleApiCall) {
return HandleApiCallHelper<false>(args, isolate);
// Entry point for construct calls (new) to API functions.
BUILTIN(HandleApiCallConstruct) {
return HandleApiCallHelper<true>(args, isolate);
1238 // Helper function to handle calls to non-function objects created through the
1239 // API. The object can be called as either a constructor (using new) or just as
1240 // a function (without new).
// Invokes the instance-call-handler registered on a non-function API object.
// The handler is looked up via the object's constructor's template data.
MUST_USE_RESULT static MaybeObject* HandleApiCallAsFunctionOrConstructor(
bool is_construct_call,
BuiltinArguments<NO_EXTRA_ARGUMENTS> args) {
// Non-functions are never called as constructors. Even if this is an object
// called as a constructor the delegate call is not a construct call.
ASSERT(!CalledAsConstructor(isolate));
Heap* heap = isolate->heap();
Handle<Object> receiver = args.receiver();
// Get the object called.
JSObject* obj = JSObject::cast(*receiver);
// Get the invocation callback from the function descriptor that was
// used to create the called object.
ASSERT(obj->map()->has_instance_call_handler());
JSFunction* constructor = JSFunction::cast(obj->map()->constructor());
ASSERT(constructor->shared()->IsApiFunction());
constructor->shared()->get_api_func_data()->instance_call_handler();
ASSERT(!handler->IsUndefined());
CallHandlerInfo* call_data = CallHandlerInfo::cast(handler);
Object* callback_obj = call_data->callback();
v8::FunctionCallback callback =
v8::ToCData<v8::FunctionCallback>(callback_obj);
// Get the data for the call and perform the callback.
HandleScope scope(isolate);
LOG(isolate, ApiObjectAccess("call non-function", obj));
FunctionCallbackArguments custom(isolate,
v8::Handle<v8::Value> value = custom.Call(callback);
// An empty handle from the callback means "no explicit return value".
if (value.IsEmpty()) {
result = heap->undefined_value();
result = *reinterpret_cast<Object**>(*value);
result->VerifyApiCallResultType();
// Check for exceptions and return result.
RETURN_IF_SCHEDULED_EXCEPTION(isolate);
1295 // Handle calls to non-function objects created through the API. This delegate
1296 // function is used when the call is a normal function call.
// Delegate for normal function calls on callable non-function API objects.
BUILTIN(HandleApiCallAsFunction) {
return HandleApiCallAsFunctionOrConstructor(isolate, false, args);
1302 // Handle calls to non-function objects created through the API. This delegate
1303 // function is used when the call is a construct call.
// Delegate for construct calls on callable non-function API objects.
BUILTIN(HandleApiCallAsConstructor) {
return HandleApiCallAsFunctionOrConstructor(isolate, true, args);
// -----------------------------------------------------------------------------
// IC builtin generators: each function below is a thin trampoline that
// delegates code generation to the corresponding LoadIC / KeyedLoadIC
// (or stub compiler) generator for the given IC state.
static void Generate_LoadIC_Miss(MacroAssembler* masm) {
LoadIC::GenerateMiss(masm);
static void Generate_LoadIC_Normal(MacroAssembler* masm) {
LoadIC::GenerateNormal(masm);
// Used when deoptimizing through an IC that called a JS getter.
static void Generate_LoadIC_Getter_ForDeopt(MacroAssembler* masm) {
LoadStubCompiler::GenerateLoadViaGetterForDeopt(masm);
static void Generate_LoadIC_Slow(MacroAssembler* masm) {
LoadIC::GenerateRuntimeGetProperty(masm);
static void Generate_KeyedLoadIC_Initialize(MacroAssembler* masm) {
KeyedLoadIC::GenerateInitialize(masm);
static void Generate_KeyedLoadIC_Slow(MacroAssembler* masm) {
KeyedLoadIC::GenerateRuntimeGetProperty(masm);
static void Generate_KeyedLoadIC_Miss(MacroAssembler* masm) {
KeyedLoadIC::GenerateMiss(masm);
static void Generate_KeyedLoadIC_Generic(MacroAssembler* masm) {
KeyedLoadIC::GenerateGeneric(masm);
static void Generate_KeyedLoadIC_String(MacroAssembler* masm) {
KeyedLoadIC::GenerateString(masm);
static void Generate_KeyedLoadIC_PreMonomorphic(MacroAssembler* masm) {
KeyedLoadIC::GeneratePreMonomorphic(masm);
static void Generate_KeyedLoadIC_IndexedInterceptor(MacroAssembler* masm) {
KeyedLoadIC::GenerateIndexedInterceptor(masm);
static void Generate_KeyedLoadIC_SloppyArguments(MacroAssembler* masm) {
KeyedLoadIC::GenerateSloppyArguments(masm);
// Trampolines delegating to the StoreIC / KeyedStoreIC code generators.
// The _Strict variants differ only in the language mode passed to (or
// implied by) the underlying generator.
static void Generate_StoreIC_Slow(MacroAssembler* masm) {
StoreIC::GenerateSlow(masm);
static void Generate_StoreIC_Miss(MacroAssembler* masm) {
StoreIC::GenerateMiss(masm);
static void Generate_StoreIC_Normal(MacroAssembler* masm) {
StoreIC::GenerateNormal(masm);
// Used when deoptimizing through an IC that called a JS setter.
static void Generate_StoreIC_Setter_ForDeopt(MacroAssembler* masm) {
StoreStubCompiler::GenerateStoreViaSetterForDeopt(masm);
static void Generate_KeyedStoreIC_Generic(MacroAssembler* masm) {
KeyedStoreIC::GenerateGeneric(masm, SLOPPY);
static void Generate_KeyedStoreIC_Generic_Strict(MacroAssembler* masm) {
KeyedStoreIC::GenerateGeneric(masm, STRICT);
static void Generate_KeyedStoreIC_Miss(MacroAssembler* masm) {
KeyedStoreIC::GenerateMiss(masm);
static void Generate_KeyedStoreIC_Slow(MacroAssembler* masm) {
KeyedStoreIC::GenerateSlow(masm);
// Initialize / PreMonomorphic generators are shared between the sloppy and
// strict variants; the distinction lives in the IC state, not the code here.
static void Generate_KeyedStoreIC_Initialize(MacroAssembler* masm) {
KeyedStoreIC::GenerateInitialize(masm);
static void Generate_KeyedStoreIC_Initialize_Strict(MacroAssembler* masm) {
KeyedStoreIC::GenerateInitialize(masm);
static void Generate_KeyedStoreIC_PreMonomorphic(MacroAssembler* masm) {
KeyedStoreIC::GeneratePreMonomorphic(masm);
static void Generate_KeyedStoreIC_PreMonomorphic_Strict(MacroAssembler* masm) {
KeyedStoreIC::GeneratePreMonomorphic(masm);
static void Generate_KeyedStoreIC_SloppyArguments(MacroAssembler* masm) {
KeyedStoreIC::GenerateSloppyArguments(masm);
#ifdef ENABLE_DEBUGGER_SUPPORT
// Debugger-support trampolines: each delegates to the Debug code generator
// that emits a breakpoint-aware version of the corresponding stub/IC.
static void Generate_LoadIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateLoadICDebugBreak(masm);
static void Generate_StoreIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateStoreICDebugBreak(masm);
static void Generate_KeyedLoadIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateKeyedLoadICDebugBreak(masm);
static void Generate_KeyedStoreIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateKeyedStoreICDebugBreak(masm);
static void Generate_CompareNilIC_DebugBreak(MacroAssembler* masm) {
Debug::GenerateCompareNilICDebugBreak(masm);
static void Generate_Return_DebugBreak(MacroAssembler* masm) {
Debug::GenerateReturnDebugBreak(masm);
static void Generate_CallFunctionStub_DebugBreak(MacroAssembler* masm) {
Debug::GenerateCallFunctionStubDebugBreak(masm);
static void Generate_CallFunctionStub_Recording_DebugBreak(
MacroAssembler* masm) {
Debug::GenerateCallFunctionStubRecordDebugBreak(masm);
static void Generate_CallConstructStub_DebugBreak(MacroAssembler* masm) {
Debug::GenerateCallConstructStubDebugBreak(masm);
static void Generate_CallConstructStub_Recording_DebugBreak(
MacroAssembler* masm) {
Debug::GenerateCallConstructStubRecordDebugBreak(masm);
static void Generate_Slot_DebugBreak(MacroAssembler* masm) {
Debug::GenerateSlotDebugBreak(masm);
// LiveEdit support: plain-return and frame-dropper code used when patching
// live frames during edit-and-continue.
static void Generate_PlainReturn_LiveEdit(MacroAssembler* masm) {
Debug::GeneratePlainReturnLiveEdit(masm);
static void Generate_FrameDropper_LiveEdit(MacroAssembler* masm) {
Debug::GenerateFrameDropperLiveEdit(masm);
// Zero the code-object and name tables; SetUp() populates them later.
Builtins::Builtins() : initialized_(false) {
memset(builtins_, 0, sizeof(builtins_[0]) * builtin_count);
memset(names_, 0, sizeof(names_[0]) * builtin_count);
// Nothing to release: code objects are heap-managed, names are static.
Builtins::~Builtins() {
#define DEF_ENUM_C(name, ignore) FUNCTION_ADDR(Builtin_##name),
// C entry points of the C++ builtins, indexed consistently with the
// BUILTIN_LIST_C enumeration order.
Address const Builtins::c_functions_[cfunction_count] = {
BUILTIN_LIST_C(DEF_ENUM_C)
#define DEF_JS_NAME(name, ignore) #name,
#define DEF_JS_ARGC(ignore, argc) argc,
// Names and expected argument counts of the JavaScript builtins, both
// generated from the same BUILTINS_LIST_JS so the indices stay in sync.
const char* const Builtins::javascript_names_[id_count] = {
BUILTINS_LIST_JS(DEF_JS_NAME)
int const Builtins::javascript_argc_[id_count] = {
BUILTINS_LIST_JS(DEF_JS_ARGC)
// Metadata for one builtin: its code generator, optional C entry point,
// log name, enum id, code flags and extra-argument policy.
struct BuiltinDesc {
const char* s_name; // name is only used for generating log information.
BuiltinExtraArguments extra_args;
#define BUILTIN_FUNCTION_TABLE_INIT { V8_ONCE_INIT, {} }
// Process-wide, lazily initialized table of BuiltinDescs. CallOnce
// guarantees InitBuiltinFunctionTable runs exactly once across threads.
class BuiltinFunctionTable {
BuiltinDesc* functions() {
CallOnce(&once_, &Builtins::InitBuiltinFunctionTable);
// One extra slot serves as a zeroed sentinel terminating the table.
BuiltinDesc functions_[Builtins::builtin_count + 1];
friend class Builtins;
static BuiltinFunctionTable builtin_function_table =
BUILTIN_FUNCTION_TABLE_INIT;
1558 // Define array of pointers to generators and C builtin functions.
1559 // We do this in a sort of roundabout way so that we can do the initialization
1560 // within the lexical scope of Builtins:: and within a context where
1561 // Code::Flags names a non-abstract type.
// Fills builtin_function_table.functions_ by expanding the BUILTIN_LIST_*
// macros; the slot at index builtin_count is a null sentinel. Invoked once
// via CallOnce from BuiltinFunctionTable::functions().
void Builtins::InitBuiltinFunctionTable() {
BuiltinDesc* functions = builtin_function_table.functions_;
functions[builtin_count].generator = NULL;
functions[builtin_count].c_code = NULL;
functions[builtin_count].s_name = NULL;
functions[builtin_count].name = builtin_count;
functions[builtin_count].flags = static_cast<Code::Flags>(0);
functions[builtin_count].extra_args = NO_EXTRA_ARGUMENTS;
// DEF_FUNCTION_PTR_C: C++ builtins reached through the Generate_Adaptor
// trampoline. DEF_FUNCTION_PTR_A: assembler builtins with their own
// generator. DEF_FUNCTION_PTR_H: IC handlers using handler code flags.
// NOTE: no comments may be placed between the continued macro lines below.
#define DEF_FUNCTION_PTR_C(aname, aextra_args) \
functions->generator = FUNCTION_ADDR(Generate_Adaptor); \
functions->c_code = FUNCTION_ADDR(Builtin_##aname); \
functions->s_name = #aname; \
functions->name = c_##aname; \
functions->flags = Code::ComputeFlags(Code::BUILTIN); \
functions->extra_args = aextra_args; \
#define DEF_FUNCTION_PTR_A(aname, kind, state, extra) \
functions->generator = FUNCTION_ADDR(Generate_##aname); \
functions->c_code = NULL; \
functions->s_name = #aname; \
functions->name = k##aname; \
functions->flags = Code::ComputeFlags(Code::kind, \
functions->extra_args = NO_EXTRA_ARGUMENTS; \
#define DEF_FUNCTION_PTR_H(aname, kind) \
functions->generator = FUNCTION_ADDR(Generate_##aname); \
functions->c_code = NULL; \
functions->s_name = #aname; \
functions->name = k##aname; \
functions->flags = Code::ComputeHandlerFlags(Code::kind); \
functions->extra_args = NO_EXTRA_ARGUMENTS; \
BUILTIN_LIST_C(DEF_FUNCTION_PTR_C)
BUILTIN_LIST_A(DEF_FUNCTION_PTR_A)
BUILTIN_LIST_H(DEF_FUNCTION_PTR_H)
BUILTIN_LIST_DEBUG_A(DEF_FUNCTION_PTR_A)
#undef DEF_FUNCTION_PTR_C
#undef DEF_FUNCTION_PTR_A
// Generates (or, when deserializing, reserves slots for) the code object of
// every builtin and records it in builtins_, together with its name in
// names_. Running out of memory here is fatal: V8 cannot start without its
// builtins.
void Builtins::SetUp(Isolate* isolate, bool create_heap_objects) {
ASSERT(!initialized_);
Heap* heap = isolate->heap();
// Create a scope for the handles in the builtins.
HandleScope scope(isolate);
const BuiltinDesc* functions = builtin_function_table.functions();
// For now we generate builtin adaptor code into a stack-allocated
// buffer, before copying it into individual code objects. Be careful
// with alignment, some platforms don't like unaligned code.
// TODO(jbramley): I had to increase the size of this buffer from 8KB because
// we can generate a lot of debug code on ARM64.
union { int force_alignment; byte buffer[16*KB]; } u;
// Traverse the list of builtins and generate an adaptor in a
// separate code object for each one.
for (int i = 0; i < builtin_count; i++) {
if (create_heap_objects) {
MacroAssembler masm(isolate, u.buffer, sizeof u.buffer);
// Generate the code/adaptor.
typedef void (*Generator)(MacroAssembler*, int, BuiltinExtraArguments);
Generator g = FUNCTION_CAST<Generator>(functions[i].generator);
// We pass all arguments to the generator, but it may not use all of
// them. This works because the first arguments are on top of the
ASSERT(!masm.has_frame());
g(&masm, functions[i].name, functions[i].extra_args);
// Move the code into the object heap.
masm.GetCode(&desc);
Code::Flags flags = functions[i].flags;
Object* code = NULL;
// During startup it's OK to always allocate and defer GC to later.
// This simplifies things because we don't need to retry.
AlwaysAllocateScope __scope__(isolate);
{ MaybeObject* maybe_code =
heap->CreateCode(desc, flags, masm.CodeObject());
if (!maybe_code->ToObject(&code)) {
v8::internal::V8::FatalProcessOutOfMemory("CreateCode");
// Log the event and add the code to the builtins array.
CodeCreateEvent(Logger::BUILTIN_TAG,
functions[i].s_name));
GDBJIT(AddCode(GDBJITInterface::BUILTIN,
functions[i].s_name,
builtins_[i] = code;
#ifdef ENABLE_DISASSEMBLER
// Optionally dump the generated code for debugging.
if (FLAG_print_builtin_code) {
CodeTracer::Scope trace_scope(isolate->GetCodeTracer());
PrintF(trace_scope.file(), "Builtin: %s\n", functions[i].s_name);
Code::cast(code)->Disassemble(functions[i].s_name, trace_scope.file());
PrintF(trace_scope.file(), "\n");
// Deserializing. The values will be filled in during IterateBuiltins.
builtins_[i] = NULL;
names_[i] = functions[i].s_name;
// Mark as initialized.
initialized_ = true;
// Marks the builtins as uninitialized; the code objects themselves are
// reclaimed by the heap, not here.
void Builtins::TearDown() {
initialized_ = false;
// Exposes every builtins_ slot to the GC / serializer object visitor.
void Builtins::IterateBuiltins(ObjectVisitor* v) {
v->VisitPointers(&builtins_[0], &builtins_[0] + builtin_count);
// Maps a pc to the name of the builtin whose code object contains it, by
// linear search over the builtins table.
const char* Builtins::Lookup(byte* pc) {
// may be called during initialization (disassembler!)
for (int i = 0; i < builtin_count; i++) {
Code* entry = Code::cast(builtins_[i]);
if (entry->contains(pc)) {
// Emits a tail call into the runtime's interrupt handler.
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
masm->TailCallRuntime(Runtime::kHiddenInterrupt, 0, 1);
// Emits a tail call into the runtime's stack-guard handler.
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
masm->TailCallRuntime(Runtime::kHiddenStackGuard, 0, 1);
// Accessor definitions: each Builtins::<name>() returns a Handle<Code> that
// wraps the address of the slot in the builtins table (builtin_address)
// rather than the code object directly, so the handle follows table updates.
// NOTE: no comments may be placed between the continued macro lines below.
#define DEFINE_BUILTIN_ACCESSOR_C(name, ignore) \
Handle<Code> Builtins::name() { \
Code** code_address = \
reinterpret_cast<Code**>(builtin_address(k##name)); \
return Handle<Code>(code_address); \
#define DEFINE_BUILTIN_ACCESSOR_A(name, kind, state, extra) \
Handle<Code> Builtins::name() { \
Code** code_address = \
reinterpret_cast<Code**>(builtin_address(k##name)); \
return Handle<Code>(code_address); \
#define DEFINE_BUILTIN_ACCESSOR_H(name, kind) \
Handle<Code> Builtins::name() { \
Code** code_address = \
reinterpret_cast<Code**>(builtin_address(k##name)); \
return Handle<Code>(code_address); \
BUILTIN_LIST_C(DEFINE_BUILTIN_ACCESSOR_C)
BUILTIN_LIST_A(DEFINE_BUILTIN_ACCESSOR_A)
BUILTIN_LIST_H(DEFINE_BUILTIN_ACCESSOR_H)
BUILTIN_LIST_DEBUG_A(DEFINE_BUILTIN_ACCESSOR_A)
// Undefine the generator macros again to keep them file-local.
#undef DEFINE_BUILTIN_ACCESSOR_C
#undef DEFINE_BUILTIN_ACCESSOR_A
} } // namespace v8::internal