1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if defined(V8_TARGET_ARCH_IA32)
32 #include "bootstrapper.h"
36 #include "serialize.h"
41 // -------------------------------------------------------------------------
42 // MacroAssembler implementation.
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45 : Assembler(arg_isolate, buffer, size),
46 generating_stub_(false),
47 allow_stub_calls_(true),
49 if (isolate() != NULL) {
50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
56 void MacroAssembler::InNewSpace(
61 Label::Distance condition_met_distance) {
62 ASSERT(cc == equal || cc == not_equal);
63 if (scratch.is(object)) {
64 and_(scratch, Immediate(~Page::kPageAlignmentMask));
66 mov(scratch, Immediate(~Page::kPageAlignmentMask));
67 and_(scratch, object);
69 // Check that we can use a test_b.
70 ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
71 ASSERT(MemoryChunk::IN_TO_SPACE < 8);
72 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
73 | (1 << MemoryChunk::IN_TO_SPACE);
74 // If non-zero, the page belongs to new-space.
75 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
76 static_cast<uint8_t>(mask));
77 j(cc, condition_met, condition_met_distance);
81 void MacroAssembler::RememberedSetHelper(
82 Register object, // Only used for debug checks.
85 SaveFPRegsMode save_fp,
86 MacroAssembler::RememberedSetFinalAction and_then) {
88 if (emit_debug_code()) {
90 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
94 // Load store buffer top.
95 ExternalReference store_buffer =
96 ExternalReference::store_buffer_top(isolate());
97 mov(scratch, Operand::StaticVariable(store_buffer));
98 // Store pointer to buffer.
99 mov(Operand(scratch, 0), addr);
100 // Increment buffer top.
101 add(scratch, Immediate(kPointerSize));
102 // Write back new top of buffer.
103 mov(Operand::StaticVariable(store_buffer), scratch);
104 // Call stub on end of buffer.
105 // Check for end of buffer.
106 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
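// (The store buffer is laid out so that kStoreBufferOverflowBit is a bit of the
// top pointer itself; it becomes set exactly when the incremented top runs past
// the end of the buffer, which is why a single bit test suffices here.)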
107 if (and_then == kReturnAtEnd) {
108 Label buffer_overflowed;
109 j(not_equal, &buffer_overflowed, Label::kNear);
111 bind(&buffer_overflowed);
113 ASSERT(and_then == kFallThroughAtEnd);
114 j(equal, &done, Label::kNear);
116 StoreBufferOverflowStub store_buffer_overflow =
117 StoreBufferOverflowStub(save_fp);
118 CallStub(&store_buffer_overflow);
119 if (and_then == kReturnAtEnd) {
122 ASSERT(and_then == kFallThroughAtEnd);
128 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
129 XMMRegister scratch_reg,
130 Register result_reg) {
133 pxor(scratch_reg, scratch_reg);
134 cvtsd2si(result_reg, input_reg);
135 test(result_reg, Immediate(0xFFFFFF00));
136 j(zero, &done, Label::kNear);
137 cmp(result_reg, Immediate(0x80000000));
138 j(equal, &conv_failure, Label::kNear);
139 mov(result_reg, Immediate(0));
140 setcc(above, result_reg);
141 sub(result_reg, Immediate(1));
142 and_(result_reg, Immediate(255));
143 jmp(&done, Label::kNear);
145 Set(result_reg, Immediate(0));
146 ucomisd(input_reg, scratch_reg);
147 j(below, &done, Label::kNear);
148 Set(result_reg, Immediate(255));
153 void MacroAssembler::ClampUint8(Register reg) {
155 test(reg, Immediate(0xFFFFFF00));
156 j(zero, &done, Label::kNear);
157 setcc(negative, reg); // 1 if negative, 0 if positive.
158 dec_b(reg); // 0 if negative, 255 if positive.
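// For example, reg == -5 yields setcc -> 1, dec_b -> 0; reg == 300 yields
// setcc -> 0, dec_b -> 255 (the byte wraps), i.e. values are clamped to [0, 255].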
163 static double kUint32Bias =
164 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
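// kUint32Bias is exactly 2^32. cvtsi2sd below treats its input as a signed
// 32-bit integer, so inputs with the sign bit set come out 2^32 too small;
// adding kUint32Bias afterwards corrects the result.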
167 void MacroAssembler::LoadUint32(XMMRegister dst,
169 XMMRegister scratch) {
171 cmp(src, Immediate(0));
173 Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
175 j(not_sign, &done, Label::kNear);
181 void MacroAssembler::RecordWriteArray(Register object,
184 SaveFPRegsMode save_fp,
185 RememberedSetAction remembered_set_action,
186 SmiCheck smi_check) {
187 // First, check if a write barrier is even needed. The tests below
188 // catch stores of Smis.
191 // Skip barrier if writing a smi.
192 if (smi_check == INLINE_SMI_CHECK) {
193 ASSERT_EQ(0, kSmiTag);
194 test(value, Immediate(kSmiTagMask));
198 // Array access: calculate the destination address in the same manner as
199 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
200 // into an array of words.
201 Register dst = index;
202 lea(dst, Operand(object, index, times_half_pointer_size,
203 FixedArray::kHeaderSize - kHeapObjectTag));
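// (A smi-encoded index is already index << 1 on ia32, so scaling it by
// times_half_pointer_size gives index * kPointerSize, as the comment above notes.)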
206 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
210 // Clobber clobbered input registers when running with the debug-code flag
211 // turned on to provoke errors.
212 if (emit_debug_code()) {
213 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
214 mov(index, Immediate(BitCast<int32_t>(kZapValue)));
219 void MacroAssembler::RecordWriteField(
224 SaveFPRegsMode save_fp,
225 RememberedSetAction remembered_set_action,
226 SmiCheck smi_check) {
227 // First, check if a write barrier is even needed. The tests below
228 // catch stores of Smis.
231 // Skip barrier if writing a smi.
232 if (smi_check == INLINE_SMI_CHECK) {
233 JumpIfSmi(value, &done, Label::kNear);
236 // Although the object register is tagged, the offset is relative to the start
237 // of the object, so the offset must be a multiple of kPointerSize.
238 ASSERT(IsAligned(offset, kPointerSize));
240 lea(dst, FieldOperand(object, offset));
241 if (emit_debug_code()) {
243 test_b(dst, (1 << kPointerSizeLog2) - 1);
244 j(zero, &ok, Label::kNear);
250 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
254 // Clobber clobbered input registers when running with the debug-code flag
255 // turned on to provoke errors.
256 if (emit_debug_code()) {
257 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
258 mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
263 void MacroAssembler::RecordWriteForMap(
268 SaveFPRegsMode save_fp) {
271 Register address = scratch1;
272 Register value = scratch2;
273 if (emit_debug_code()) {
275 lea(address, FieldOperand(object, HeapObject::kMapOffset));
276 test_b(address, (1 << kPointerSizeLog2) - 1);
277 j(zero, &ok, Label::kNear);
282 ASSERT(!object.is(value));
283 ASSERT(!object.is(address));
284 ASSERT(!value.is(address));
285 AssertNotSmi(object);
287 if (!FLAG_incremental_marking) {
291 // A single check of the map page's interesting flag suffices, since it is
292 // only set during incremental collection, and then it's also guaranteed that
293 // the from object's page's interesting flag is also set. This optimization
294 // relies on the fact that maps can never be in new space.
295 ASSERT(!isolate()->heap()->InNewSpace(*map));
296 CheckPageFlagForMap(map,
297 MemoryChunk::kPointersToHereAreInterestingMask,
302 // Delay the initialization of |address| and |value| for the stub until it's
303 // known that they will be needed. Up until this point their values are not
304 // needed since they are embedded in the operands of instructions that need them.
306 lea(address, FieldOperand(object, HeapObject::kMapOffset));
307 mov(value, Immediate(map));
308 RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
313 // Clobber clobbered input registers when running with the debug-code flag
314 // turned on to provoke errors.
315 if (emit_debug_code()) {
316 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
317 mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
318 mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
323 void MacroAssembler::RecordWrite(Register object,
326 SaveFPRegsMode fp_mode,
327 RememberedSetAction remembered_set_action,
328 SmiCheck smi_check) {
329 ASSERT(!object.is(value));
330 ASSERT(!object.is(address));
331 ASSERT(!value.is(address));
332 AssertNotSmi(object);
334 if (remembered_set_action == OMIT_REMEMBERED_SET &&
335 !FLAG_incremental_marking) {
339 if (emit_debug_code()) {
341 cmp(value, Operand(address, 0));
342 j(equal, &ok, Label::kNear);
347 // First, check if a write barrier is even needed. The tests below
348 // catch stores of Smis and stores into young gen.
351 if (smi_check == INLINE_SMI_CHECK) {
352 // Skip barrier if writing a smi.
353 JumpIfSmi(value, &done, Label::kNear);
357 value, // Used as scratch.
358 MemoryChunk::kPointersToHereAreInterestingMask,
362 CheckPageFlag(object,
363 value, // Used as scratch.
364 MemoryChunk::kPointersFromHereAreInterestingMask,
369 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
374 // Clobber clobbered registers when running with the debug-code flag
375 // turned on to provoke errors.
376 if (emit_debug_code()) {
377 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
378 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
383 #ifdef ENABLE_DEBUGGER_SUPPORT
384 void MacroAssembler::DebugBreak() {
385 Set(eax, Immediate(0));
386 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
388 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
393 void MacroAssembler::Set(Register dst, const Immediate& x) {
395 xor_(dst, dst); // Shorter than mov.
402 void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
407 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
408 static const int kMaxImmediateBits = 17;
409 if (x.rmode_ != RelocInfo::NONE) return false;
410 return !is_intn(x.x_, kMaxImmediateBits);
414 void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
415 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
416 Set(dst, Immediate(x.x_ ^ jit_cookie()));
417 xor_(dst, jit_cookie());
424 void MacroAssembler::SafePush(const Immediate& x) {
425 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
426 push(Immediate(x.x_ ^ jit_cookie()));
427 xor_(Operand(esp, 0), Immediate(jit_cookie()));
434 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
435 // see ROOT_ACCESSOR macro in factory.h
436 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
441 void MacroAssembler::CompareRoot(const Operand& with,
442 Heap::RootListIndex index) {
443 // see ROOT_ACCESSOR macro in factory.h
444 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
449 void MacroAssembler::CmpObjectType(Register heap_object,
452 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
453 CmpInstanceType(map, type);
457 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
458 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
459 static_cast<int8_t>(type));
463 void MacroAssembler::CheckFastElements(Register map,
465 Label::Distance distance) {
466 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
467 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
468 STATIC_ASSERT(FAST_ELEMENTS == 2);
469 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
470 cmpb(FieldOperand(map, Map::kBitField2Offset),
471 Map::kMaximumBitField2FastHoleyElementValue);
472 j(above, fail, distance);
476 void MacroAssembler::CheckFastObjectElements(Register map,
478 Label::Distance distance) {
479 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
480 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
481 STATIC_ASSERT(FAST_ELEMENTS == 2);
482 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
483 cmpb(FieldOperand(map, Map::kBitField2Offset),
484 Map::kMaximumBitField2FastHoleySmiElementValue);
485 j(below_equal, fail, distance);
486 cmpb(FieldOperand(map, Map::kBitField2Offset),
487 Map::kMaximumBitField2FastHoleyElementValue);
488 j(above, fail, distance);
492 void MacroAssembler::CheckFastSmiElements(Register map,
494 Label::Distance distance) {
495 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
496 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
497 cmpb(FieldOperand(map, Map::kBitField2Offset),
498 Map::kMaximumBitField2FastHoleySmiElementValue);
499 j(above, fail, distance);
503 void MacroAssembler::StoreNumberToDoubleElements(
504 Register maybe_number,
508 XMMRegister scratch2,
510 bool specialize_for_processor,
511 int elements_offset) {
512 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
513 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
515 CheckMap(maybe_number,
516 isolate()->factory()->heap_number_map(),
520 // Double value, canonicalize NaN.
521 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
522 cmp(FieldOperand(maybe_number, offset),
523 Immediate(kNaNOrInfinityLowerBoundUpper32));
524 j(greater_equal, &maybe_nan, Label::kNear);
527 ExternalReference canonical_nan_reference =
528 ExternalReference::address_of_canonical_non_hole_nan();
529 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
530 CpuFeatures::Scope use_sse2(SSE2);
531 movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
532 bind(&have_double_value);
533 movdbl(FieldOperand(elements, key, times_4,
534 FixedDoubleArray::kHeaderSize - elements_offset),
537 fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
538 bind(&have_double_value);
539 fstp_d(FieldOperand(elements, key, times_4,
540 FixedDoubleArray::kHeaderSize - elements_offset));
545 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
546 // it's an Infinity, and the non-NaN code path applies.
547 j(greater, &is_nan, Label::kNear);
548 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
551 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
552 CpuFeatures::Scope use_sse2(SSE2);
553 movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
555 fld_d(Operand::StaticVariable(canonical_nan_reference));
557 jmp(&have_double_value, Label::kNear);
560 // Value is a smi. Convert to a double and store.
561 // Preserve original value.
562 mov(scratch1, maybe_number);
564 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
565 CpuFeatures::Scope fscope(SSE2);
566 cvtsi2sd(scratch2, scratch1);
567 movdbl(FieldOperand(elements, key, times_4,
568 FixedDoubleArray::kHeaderSize - elements_offset),
572 fild_s(Operand(esp, 0));
574 fstp_d(FieldOperand(elements, key, times_4,
575 FixedDoubleArray::kHeaderSize - elements_offset));
581 void MacroAssembler::CompareMap(Register obj,
583 Label* early_success,
584 CompareMapMode mode) {
585 cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
586 if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
587 ElementsKind kind = map->elements_kind();
588 if (IsFastElementsKind(kind)) {
589 bool packed = IsFastPackedElementsKind(kind);
590 Map* current_map = *map;
591 while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
592 kind = GetNextMoreGeneralFastElementsKind(kind, packed);
593 current_map = current_map->LookupElementsTransitionMap(kind);
594 if (!current_map) break;
595 j(equal, early_success, Label::kNear);
596 cmp(FieldOperand(obj, HeapObject::kMapOffset),
597 Handle<Map>(current_map));
604 void MacroAssembler::CheckMap(Register obj,
607 SmiCheckType smi_check_type,
608 CompareMapMode mode) {
609 if (smi_check_type == DO_SMI_CHECK) {
610 JumpIfSmi(obj, fail);
614 CompareMap(obj, map, &success, mode);
620 void MacroAssembler::DispatchMap(Register obj,
622 Handle<Code> success,
623 SmiCheckType smi_check_type) {
625 if (smi_check_type == DO_SMI_CHECK) {
626 JumpIfSmi(obj, &fail);
628 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
635 Condition MacroAssembler::IsObjectStringType(Register heap_object,
637 Register instance_type) {
638 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
639 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
640 STATIC_ASSERT(kNotStringTag != 0);
641 test(instance_type, Immediate(kIsNotStringMask));
646 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
650 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
651 IsInstanceJSObjectType(map, scratch, fail);
655 void MacroAssembler::IsInstanceJSObjectType(Register map,
658 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
659 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
661 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
666 void MacroAssembler::FCmp() {
667 if (CpuFeatures::IsSupported(CMOV)) {
680 void MacroAssembler::AssertNumber(Register object) {
681 if (emit_debug_code()) {
683 JumpIfSmi(object, &ok);
684 cmp(FieldOperand(object, HeapObject::kMapOffset),
685 isolate()->factory()->heap_number_map());
686 Check(equal, "Operand not a number");
692 void MacroAssembler::AssertSmi(Register object) {
693 if (emit_debug_code()) {
694 test(object, Immediate(kSmiTagMask));
695 Check(equal, "Operand is not a smi");
700 void MacroAssembler::AssertString(Register object) {
701 if (emit_debug_code()) {
702 test(object, Immediate(kSmiTagMask));
703 Check(not_equal, "Operand is a smi and not a string");
705 mov(object, FieldOperand(object, HeapObject::kMapOffset));
706 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
708 Check(below, "Operand is not a string");
713 void MacroAssembler::AssertNotSmi(Register object) {
714 if (emit_debug_code()) {
715 test(object, Immediate(kSmiTagMask));
716 Check(not_equal, "Operand is a smi");
721 void MacroAssembler::EnterFrame(StackFrame::Type type) {
725 push(Immediate(Smi::FromInt(type)));
726 push(Immediate(CodeObject()));
727 if (emit_debug_code()) {
728 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
729 Check(not_equal, "code object not properly patched");
734 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
735 if (emit_debug_code()) {
736 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
737 Immediate(Smi::FromInt(type)));
738 Check(equal, "stack frame types must match");
744 void MacroAssembler::EnterExitFramePrologue() {
745 // Set up the frame structure on the stack.
746 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
747 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
748 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
752 // Reserve room for entry stack pointer and push the code object.
753 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
754 push(Immediate(0)); // Saved entry sp, patched before call.
755 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
757 // Save the frame pointer and the context in top.
758 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
760 ExternalReference context_address(Isolate::kContextAddress,
762 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
763 mov(Operand::StaticVariable(context_address), esi);
767 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
768 // Optionally save all XMM registers.
770 CpuFeatures::Scope scope(SSE2);
771 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
772 sub(esp, Immediate(space));
773 const int offset = -2 * kPointerSize;
774 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
775 XMMRegister reg = XMMRegister::from_code(i);
776 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
779 sub(esp, Immediate(argc * kPointerSize));
782 // Get the required frame alignment for the OS.
783 const int kFrameAlignment = OS::ActivationFrameAlignment();
784 if (kFrameAlignment > 0) {
785 ASSERT(IsPowerOf2(kFrameAlignment));
786 and_(esp, -kFrameAlignment);
789 // Patch the saved entry sp.
790 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
794 void MacroAssembler::EnterExitFrame(bool save_doubles) {
795 EnterExitFramePrologue();
797 // Set up argc and argv in callee-saved registers.
798 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
800 lea(esi, Operand(ebp, eax, times_4, offset));
802 // Reserve space for argc, argv and isolate.
803 EnterExitFrameEpilogue(3, save_doubles);
807 void MacroAssembler::EnterApiExitFrame(int argc) {
808 EnterExitFramePrologue();
809 EnterExitFrameEpilogue(argc, false);
813 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
814 // Optionally restore all XMM registers.
816 CpuFeatures::Scope scope(SSE2);
817 const int offset = -2 * kPointerSize;
818 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
819 XMMRegister reg = XMMRegister::from_code(i);
820 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
824 // Get the return address from the stack and restore the frame pointer.
825 mov(ecx, Operand(ebp, 1 * kPointerSize));
826 mov(ebp, Operand(ebp, 0 * kPointerSize));
828 // Pop the arguments and the receiver from the caller stack.
829 lea(esp, Operand(esi, 1 * kPointerSize));
831 // Push the return address to get ready to return.
834 LeaveExitFrameEpilogue();
837 void MacroAssembler::LeaveExitFrameEpilogue() {
838 // Restore current context from top and clear it in debug mode.
839 ExternalReference context_address(Isolate::kContextAddress, isolate());
840 mov(esi, Operand::StaticVariable(context_address));
842 mov(Operand::StaticVariable(context_address), Immediate(0));
845 // Clear the top frame.
846 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
848 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
852 void MacroAssembler::LeaveApiExitFrame() {
856 LeaveExitFrameEpilogue();
860 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
862 // Adjust this code if not the case.
863 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
864 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
865 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
866 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
867 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
868 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
870 // We will build up the handler from the bottom by pushing on the stack.
871 // First push the frame pointer and context.
872 if (kind == StackHandler::JS_ENTRY) {
873 // The frame pointer does not point to a JS frame so we save NULL for
874 // ebp. We expect the code throwing an exception to check ebp before
875 // dereferencing it to restore the context.
876 push(Immediate(0)); // NULL frame pointer.
877 push(Immediate(Smi::FromInt(0))); // No context.
882 // Push the state and the code object.
884 StackHandler::IndexField::encode(handler_index) |
885 StackHandler::KindField::encode(kind);
886 push(Immediate(state));
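// The kind is encoded in the low StackHandler::kKindWidth bits of |state|;
// JumpToHandlerEntry later shifts it out to recover the handler table index.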
889 // Link the current handler as the next handler.
890 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
891 push(Operand::StaticVariable(handler_address));
892 // Set this new handler as the current one.
893 mov(Operand::StaticVariable(handler_address), esp);
897 void MacroAssembler::PopTryHandler() {
898 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
899 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
900 pop(Operand::StaticVariable(handler_address));
901 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
905 void MacroAssembler::JumpToHandlerEntry() {
906 // Compute the handler entry address and jump to it. The handler table is
907 // a fixed array of (smi-tagged) code offsets.
908 // eax = exception, edi = code object, edx = state.
909 mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
910 shr(edx, StackHandler::kKindWidth);
911 mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
913 lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
918 void MacroAssembler::Throw(Register value) {
919 // Adjust this code if not the case.
920 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
921 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
922 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
923 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
924 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
925 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
927 // The exception is expected in eax.
928 if (!value.is(eax)) {
931 // Drop the stack pointer to the top of the top handler.
932 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
933 mov(esp, Operand::StaticVariable(handler_address));
934 // Restore the next handler.
935 pop(Operand::StaticVariable(handler_address));
937 // Remove the code object and state, compute the handler address in edi.
938 pop(edi); // Code object.
939 pop(edx); // Index and state.
941 // Restore the context and frame pointer.
942 pop(esi); // Context.
943 pop(ebp); // Frame pointer.
945 // If the handler is a JS frame, restore the context to the frame.
946 // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either of them.
950 j(zero, &skip, Label::kNear);
951 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
954 JumpToHandlerEntry();
958 void MacroAssembler::ThrowUncatchable(Register value) {
959 // Adjust this code if not the case.
960 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
961 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
962 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
963 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
964 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
965 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
967 // The exception is expected in eax.
968 if (!value.is(eax)) {
971 // Drop the stack pointer to the top of the top stack handler.
972 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
973 mov(esp, Operand::StaticVariable(handler_address));
975 // Unwind the handlers until the top ENTRY handler is found.
976 Label fetch_next, check_kind;
977 jmp(&check_kind, Label::kNear);
979 mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
982 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
983 test(Operand(esp, StackHandlerConstants::kStateOffset),
984 Immediate(StackHandler::KindField::kMask));
985 j(not_zero, &fetch_next);
987 // Set the top handler address to next handler past the top ENTRY handler.
988 pop(Operand::StaticVariable(handler_address));
990 // Remove the code object and state, compute the handler address in edi.
991 pop(edi); // Code object.
992 pop(edx); // Index and state.
994 // Clear the context pointer and frame pointer (0 was saved in the handler).
998 JumpToHandlerEntry();
1002 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1005 Label same_contexts;
1007 ASSERT(!holder_reg.is(scratch));
1009 // Load current lexical context from the stack frame.
1010 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
1012 // When generating debug code, make sure the lexical context is set.
1013 if (emit_debug_code()) {
1014 cmp(scratch, Immediate(0));
1015 Check(not_equal, "we should not have an empty lexical context");
1017 // Load the native context of the current context.
1019 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1020 mov(scratch, FieldOperand(scratch, offset));
1021 mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
1023 // Check the context is a native context.
1024 if (emit_debug_code()) {
1026 // Read the first word and compare to native_context_map.
1027 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
1028 cmp(scratch, isolate()->factory()->native_context_map());
1029 Check(equal, "JSGlobalObject::native_context should be a native context.");
1033 // Check if both contexts are the same.
1034 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1035 j(equal, &same_contexts);
1037 // Compare security tokens, save holder_reg on the stack so we can use it
1038 // as a temporary register.
1040 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
1042 // Check that the security token in the calling global object is
1043 // compatible with the security token in the receiving global object.
1046 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1048 // Check the context is a native context.
1049 if (emit_debug_code()) {
1050 cmp(holder_reg, isolate()->factory()->null_value());
1051 Check(not_equal, "JSGlobalProxy::context() should not be null.");
1054 // Read the first word and compare to native_context_map().
1055 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
1056 cmp(holder_reg, isolate()->factory()->native_context_map());
1057 Check(equal, "JSGlobalObject::native_context should be a native context.");
1061 int token_offset = Context::kHeaderSize +
1062 Context::SECURITY_TOKEN_INDEX * kPointerSize;
1063 mov(scratch, FieldOperand(scratch, token_offset));
1064 cmp(scratch, FieldOperand(holder_reg, token_offset));
1068 bind(&same_contexts);
1072 // Compute the hash code from the untagged key. This must be kept in sync
1073 // with ComputeIntegerHash in utils.h.
1075 // Note: r0 will contain hash code
1076 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1077 // Xor original key with a seed.
1078 if (Serializer::enabled()) {
1079 ExternalReference roots_array_start =
1080 ExternalReference::roots_array_start(isolate());
1081 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1083 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
1087 int32_t seed = isolate()->heap()->HashSeed();
1088 xor_(r0, Immediate(seed));
1091 // hash = ~hash + (hash << 15);
1096 // hash = hash ^ (hash >> 12);
1100 // hash = hash + (hash << 2);
1101 lea(r0, Operand(r0, r0, times_4, 0));
1102 // hash = hash ^ (hash >> 4);
1106 // hash = hash * 2057;
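// (2057 == 2048 + 8 + 1, so this multiplication can be done with two
// shift-and-add steps instead of an imul.)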
1108 // hash = hash ^ (hash >> 16);
1116 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1125 // elements - holds the slow-case elements of the receiver and is unchanged.
1127 // key - holds the smi key on entry and is unchanged.
1129 // Scratch registers:
1131 // r0 - holds the untagged key on entry and holds the hash once computed.
1133 // r1 - used to hold the capacity mask of the dictionary
1135 // r2 - used for the index into the dictionary.
1137 // result - holds the result on exit if the load succeeds and we fall through.
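//
// Each SeededNumberDictionary entry occupies kEntrySize (3) words - key, value
// and details - which is why the value and details are read below at
// kElementsStartOffset + kPointerSize and + 2 * kPointerSize respectively.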
1141 GetNumberHash(r0, r1);
1143 // Compute capacity mask.
1144 mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1145 shr(r1, kSmiTagSize); // convert smi to int
1148 // Generate an unrolled loop that performs a few probes before giving up.
1149 const int kProbes = 4;
1150 for (int i = 0; i < kProbes; i++) {
1151 // Use r2 for index calculations and keep the hash intact in r0.
1153 // Compute the masked index: (hash + i + i * i) & mask.
1155 add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
1159 // Scale the index by multiplying by the entry size.
1160 ASSERT(SeededNumberDictionary::kEntrySize == 3);
1161 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1163 // Check if the key matches.
1164 cmp(key, FieldOperand(elements,
1167 SeededNumberDictionary::kElementsStartOffset));
1168 if (i != (kProbes - 1)) {
1176 // Check that the value is a normal property.
1177 const int kDetailsOffset =
1178 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1179 ASSERT_EQ(NORMAL, 0);
1180 test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1181 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1184 // Get the value at the masked, scaled index.
1185 const int kValueOffset =
1186 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1187 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
1191 void MacroAssembler::LoadAllocationTopHelper(Register result,
1193 AllocationFlags flags) {
1194 ExternalReference new_space_allocation_top =
1195 ExternalReference::new_space_allocation_top_address(isolate());
1197 // Just return if allocation top is already known.
1198 if ((flags & RESULT_CONTAINS_TOP) != 0) {
1199 // No use of scratch if allocation top is provided.
1200 ASSERT(scratch.is(no_reg));
1202 // Assert that result actually contains top on entry.
1203 cmp(result, Operand::StaticVariable(new_space_allocation_top));
1204 Check(equal, "Unexpected allocation top");
1209 // Move address of new object to result. Use scratch register if available.
1210 if (scratch.is(no_reg)) {
1211 mov(result, Operand::StaticVariable(new_space_allocation_top));
1213 mov(scratch, Immediate(new_space_allocation_top));
1214 mov(result, Operand(scratch, 0));
1219 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1221 if (emit_debug_code()) {
1222 test(result_end, Immediate(kObjectAlignmentMask));
1223 Check(zero, "Unaligned allocation in new space");
1226 ExternalReference new_space_allocation_top =
1227 ExternalReference::new_space_allocation_top_address(isolate());
1229 // Update new top. Use scratch if available.
1230 if (scratch.is(no_reg)) {
1231 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
1233 mov(Operand(scratch, 0), result_end);
1238 void MacroAssembler::AllocateInNewSpace(int object_size,
1240 Register result_end,
1243 AllocationFlags flags) {
1244 if (!FLAG_inline_new) {
1245 if (emit_debug_code()) {
1246 // Trash the registers to simulate an allocation failure.
1247 mov(result, Immediate(0x7091));
1248 if (result_end.is_valid()) {
1249 mov(result_end, Immediate(0x7191));
1251 if (scratch.is_valid()) {
1252 mov(scratch, Immediate(0x7291));
1258 ASSERT(!result.is(result_end));
1260 // Load address of new object into result.
1261 LoadAllocationTopHelper(result, scratch, flags);
1263 Register top_reg = result_end.is_valid() ? result_end : result;
1265 // Calculate new top and bail out if new space is exhausted.
1266 ExternalReference new_space_allocation_limit =
1267 ExternalReference::new_space_allocation_limit_address(isolate());
1269 if (!top_reg.is(result)) {
1270 mov(top_reg, result);
1272 add(top_reg, Immediate(object_size));
1273 j(carry, gc_required);
1274 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
1275 j(above, gc_required);
1277 // Update allocation top.
1278 UpdateAllocationTopHelper(top_reg, scratch);
1280 // Tag result if requested.
1281 if (top_reg.is(result)) {
1282 if ((flags & TAG_OBJECT) != 0) {
1283 sub(result, Immediate(object_size - kHeapObjectTag));
1285 sub(result, Immediate(object_size));
1287 } else if ((flags & TAG_OBJECT) != 0) {
1288 add(result, Immediate(kHeapObjectTag));
1293 void MacroAssembler::AllocateInNewSpace(int header_size,
1294 ScaleFactor element_size,
1295 Register element_count,
1297 Register result_end,
1300 AllocationFlags flags) {
1301 if (!FLAG_inline_new) {
1302 if (emit_debug_code()) {
1303 // Trash the registers to simulate an allocation failure.
1304 mov(result, Immediate(0x7091));
1305 mov(result_end, Immediate(0x7191));
1306 if (scratch.is_valid()) {
1307 mov(scratch, Immediate(0x7291));
1309 // Register element_count is not modified by the function.
1314 ASSERT(!result.is(result_end));
1316 // Load address of new object into result.
1317 LoadAllocationTopHelper(result, scratch, flags);
1319 // Calculate new top and bail out if new space is exhausted.
1320 ExternalReference new_space_allocation_limit =
1321 ExternalReference::new_space_allocation_limit_address(isolate());
1323 // We assume that element_count*element_size + header_size does not overflow.
1325 lea(result_end, Operand(element_count, element_size, header_size));
1326 add(result_end, result);
1327 j(carry, gc_required);
1328 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1329 j(above, gc_required);
1331 // Tag result if requested.
1332 if ((flags & TAG_OBJECT) != 0) {
1333 lea(result, Operand(result, kHeapObjectTag));
1336 // Update allocation top.
1337 UpdateAllocationTopHelper(result_end, scratch);
1341 void MacroAssembler::AllocateInNewSpace(Register object_size,
1343 Register result_end,
1346 AllocationFlags flags) {
1347 if (!FLAG_inline_new) {
1348 if (emit_debug_code()) {
1349 // Trash the registers to simulate an allocation failure.
1350 mov(result, Immediate(0x7091));
1351 mov(result_end, Immediate(0x7191));
1352 if (scratch.is_valid()) {
1353 mov(scratch, Immediate(0x7291));
1355 // object_size is left unchanged by this function.
1360 ASSERT(!result.is(result_end));
1362 // Load address of new object into result.
1363 LoadAllocationTopHelper(result, scratch, flags);
1365 // Calculate new top and bail out if new space is exhausted.
1366 ExternalReference new_space_allocation_limit =
1367 ExternalReference::new_space_allocation_limit_address(isolate());
1368 if (!object_size.is(result_end)) {
1369 mov(result_end, object_size);
1371 add(result_end, result);
1372 j(carry, gc_required);
1373 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1374 j(above, gc_required);
1376 // Tag result if requested.
1377 if ((flags & TAG_OBJECT) != 0) {
1378 lea(result, Operand(result, kHeapObjectTag));
1381 // Update allocation top.
1382 UpdateAllocationTopHelper(result_end, scratch);
1386 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1387 ExternalReference new_space_allocation_top =
1388 ExternalReference::new_space_allocation_top_address(isolate());
1390 // Make sure the object has no tag before resetting top.
1391 and_(object, Immediate(~kHeapObjectTagMask));
1393 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1394 Check(below, "Undo allocation of non allocated memory");
1396 mov(Operand::StaticVariable(new_space_allocation_top), object);
1400 void MacroAssembler::AllocateHeapNumber(Register result,
1403 Label* gc_required) {
1404 // Allocate heap number in new space.
1405 AllocateInNewSpace(HeapNumber::kSize,
1413 mov(FieldOperand(result, HeapObject::kMapOffset),
1414 Immediate(isolate()->factory()->heap_number_map()));
1418 void MacroAssembler::AllocateTwoByteString(Register result,
1423 Label* gc_required) {
1424 // Calculate the number of bytes needed for the characters in the string while
1425 // observing object alignment.
1426 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1427 ASSERT(kShortSize == 2);
1428 // scratch1 = length * 2 + kObjectAlignmentMask.
1429 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1430 and_(scratch1, Immediate(~kObjectAlignmentMask));
1432 // Allocate two byte string in new space.
1433 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
1442 // Set the map, length and hash field.
1443 mov(FieldOperand(result, HeapObject::kMapOffset),
1444 Immediate(isolate()->factory()->string_map()));
1445 mov(scratch1, length);
1447 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1448 mov(FieldOperand(result, String::kHashFieldOffset),
1449 Immediate(String::kEmptyHashField));
1453 void MacroAssembler::AllocateAsciiString(Register result,
1458 Label* gc_required) {
1459 // Calculate the number of bytes needed for the characters in the string while
1460 // observing object alignment.
1461 ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1462 mov(scratch1, length);
1463 ASSERT(kCharSize == 1);
1464 add(scratch1, Immediate(kObjectAlignmentMask));
1465 and_(scratch1, Immediate(~kObjectAlignmentMask));
1467 // Allocate ASCII string in new space.
1468 AllocateInNewSpace(SeqOneByteString::kHeaderSize,
1477 // Set the map, length and hash field.
1478 mov(FieldOperand(result, HeapObject::kMapOffset),
1479 Immediate(isolate()->factory()->ascii_string_map()));
1480 mov(scratch1, length);
1482 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1483 mov(FieldOperand(result, String::kHashFieldOffset),
1484 Immediate(String::kEmptyHashField));
1488 void MacroAssembler::AllocateAsciiString(Register result,
1492 Label* gc_required) {
1495 // Allocate ASCII string in new space.
1496 AllocateInNewSpace(SeqOneByteString::SizeFor(length),
1503 // Set the map, length and hash field.
1504 mov(FieldOperand(result, HeapObject::kMapOffset),
1505 Immediate(isolate()->factory()->ascii_string_map()));
1506 mov(FieldOperand(result, String::kLengthOffset),
1507 Immediate(Smi::FromInt(length)));
1508 mov(FieldOperand(result, String::kHashFieldOffset),
1509 Immediate(String::kEmptyHashField));
1513 void MacroAssembler::AllocateTwoByteConsString(Register result,
1516 Label* gc_required) {
1517 // Allocate heap number in new space.
1518 AllocateInNewSpace(ConsString::kSize,
1525 // Set the map. The other fields are left uninitialized.
1526 mov(FieldOperand(result, HeapObject::kMapOffset),
1527 Immediate(isolate()->factory()->cons_string_map()));
1531 void MacroAssembler::AllocateAsciiConsString(Register result,
1534 Label* gc_required) {
1535 // Allocate heap number in new space.
1536 AllocateInNewSpace(ConsString::kSize,
1543 // Set the map. The other fields are left uninitialized.
1544 mov(FieldOperand(result, HeapObject::kMapOffset),
1545 Immediate(isolate()->factory()->cons_ascii_string_map()));
1549 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1552 Label* gc_required) {
1553 // Allocate heap number in new space.
1554 AllocateInNewSpace(SlicedString::kSize,
1561 // Set the map. The other fields are left uninitialized.
1562 mov(FieldOperand(result, HeapObject::kMapOffset),
1563 Immediate(isolate()->factory()->sliced_string_map()));
1567 void MacroAssembler::AllocateAsciiSlicedString(Register result,
1570 Label* gc_required) {
1571 // Allocate heap number in new space.
1572 AllocateInNewSpace(SlicedString::kSize,
1579 // Set the map. The other fields are left uninitialized.
1580 mov(FieldOperand(result, HeapObject::kMapOffset),
1581 Immediate(isolate()->factory()->sliced_ascii_string_map()));
1585 // Copy memory, byte-by-byte, from source to destination. Not optimized for
1586 // long or aligned copies. The contents of scratch and length are destroyed.
1587 // Source and destination are incremented by length.
1588 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1589 // have been tried here already, and this is fastest.
1590 // A simpler loop is faster on small copies, but 30% slower on large ones.
1591 // The cld() instruction must have been emitted, to set the direction flag,
1592 // before calling this function.
1593 void MacroAssembler::CopyBytes(Register source,
1594 Register destination,
1597 Label loop, done, short_string, short_loop;
1598 // Experimentation shows that the short string loop is faster if length < 10.
1599 cmp(length, Immediate(10));
1600 j(less_equal, &short_string);
1602 ASSERT(source.is(esi));
1603 ASSERT(destination.is(edi));
1604 ASSERT(length.is(ecx));
1606 // Because source is 4-byte aligned in our uses of this function,
1607 // we keep source aligned for the rep_movs call by copying the odd bytes
1608 // at the end of the ranges.
1609 mov(scratch, Operand(source, length, times_1, -4));
1610 mov(Operand(destination, length, times_1, -4), scratch);
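// For example, with length == 10 the dword copy above moves bytes 6..9; the
// rep_movs sequence then copies length / 4 == 2 dwords (bytes 0..7), and the
// trailing length & 3 bytes have already been covered.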
1614 and_(scratch, Immediate(0x3));
1615 add(destination, scratch);
1618 bind(&short_string);
1619 test(length, length);
1623 mov_b(scratch, Operand(source, 0));
1624 mov_b(Operand(destination, 0), scratch);
1628 j(not_zero, &short_loop);
1634 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1635 Register end_offset,
1640 mov(Operand(start_offset, 0), filler);
1641 add(start_offset, Immediate(kPointerSize));
1643 cmp(start_offset, end_offset);
1648 void MacroAssembler::BooleanBitTest(Register object,
1651 bit_index += kSmiTagSize + kSmiShiftSize;
1652 ASSERT(IsPowerOf2(kBitsPerByte));
1653 int byte_index = bit_index / kBitsPerByte;
1654 int byte_bit_index = bit_index & (kBitsPerByte - 1);
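// E.g. on ia32 (kSmiTagSize == 1, kSmiShiftSize == 0) a field bit index of 9
// becomes 10 after the adjustment above, i.e. bit 2 of byte 1 of the
// smi-encoded field.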
1655 test_b(FieldOperand(object, field_offset + byte_index),
1656 static_cast<byte>(1 << byte_bit_index));
1661 void MacroAssembler::NegativeZeroTest(Register result,
1663 Label* then_label) {
1665 test(result, result);
1668 j(sign, then_label);
1673 void MacroAssembler::NegativeZeroTest(Register result,
1677 Label* then_label) {
1679 test(result, result);
1683 j(sign, then_label);
1688 void MacroAssembler::TryGetFunctionPrototype(Register function,
1692 bool miss_on_bound_function) {
1693 // Check that the receiver isn't a smi.
1694 JumpIfSmi(function, miss);
1696 // Check that the function really is a function.
1697 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1700 if (miss_on_bound_function) {
1701 // If a bound function, go to miss label.
1703 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1704 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1705 SharedFunctionInfo::kBoundFunction);
1709 // Make sure that the function has an instance prototype.
1711 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1712 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1713 j(not_zero, &non_instance);
1715 // Get the prototype or initial map from the function.
1717 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1719 // If the prototype or initial map is the hole, don't return it and
1720 // simply miss the cache instead. This will allow us to allocate a
1721 // prototype object on-demand in the runtime system.
1722 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1725 // If the function does not have an initial map, we're done.
1727 CmpObjectType(result, MAP_TYPE, scratch);
1728 j(not_equal, &done);
1730 // Get the prototype from the initial map.
1731 mov(result, FieldOperand(result, Map::kPrototypeOffset));
1734 // Non-instance prototype: Fetch prototype from constructor field in initial map.
1736 bind(&non_instance);
1737 mov(result, FieldOperand(result, Map::kConstructorOffset));
1744 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
1745 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1746 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1750 void MacroAssembler::TailCallStub(CodeStub* stub) {
1751 ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
1752 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1756 void MacroAssembler::StubReturn(int argc) {
1757 ASSERT(argc >= 1 && generating_stub());
1758 ret((argc - 1) * kPointerSize);
1762 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1763 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
1764 return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
1768 void MacroAssembler::IllegalOperation(int num_arguments) {
1769 if (num_arguments > 0) {
1770 add(esp, Immediate(num_arguments * kPointerSize));
1772 mov(eax, Immediate(isolate()->factory()->undefined_value()));
1776 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1777 // The assert checks that the constants for the maximum number of digits
1778 // for an array index cached in the hash field and the number of bits
1779 // reserved for it do not conflict.
1780 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
1781 (1 << String::kArrayIndexValueBits));
1782 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
1783 // the low kHashShift bits.
1784 and_(hash, String::kArrayIndexValueMask);
1785 STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
1786 if (String::kHashShift > kSmiTagSize) {
1787 shr(hash, String::kHashShift - kSmiTagSize);
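// Shifting right by kHashShift - kSmiTagSize (rather than kHashShift) leaves
// the index shifted left by kSmiTagSize, i.e. already smi-tagged (kSmiTag == 0).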
1789 if (!index.is(hash)) {
1795 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1796 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1800 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1801 const Runtime::Function* function = Runtime::FunctionForId(id);
1802 Set(eax, Immediate(function->nargs));
1803 mov(ebx, Immediate(ExternalReference(function, isolate())));
1804 CEntryStub ces(1, kSaveFPRegs);
1809 void MacroAssembler::CallRuntime(const Runtime::Function* f,
1810 int num_arguments) {
1811 // If the expected number of arguments of the runtime function is
1812 // constant, we check that the actual number of arguments matches the expectation.
1814 if (f->nargs >= 0 && f->nargs != num_arguments) {
1815 IllegalOperation(num_arguments);
1819 // TODO(1236192): Most runtime routines don't need the number of
1820 // arguments passed in because it is constant. At some point we
1821 // should remove this need and make the runtime routine entry code smarter.
1823 Set(eax, Immediate(num_arguments));
1824 mov(ebx, Immediate(ExternalReference(f, isolate())));
1830 void MacroAssembler::CallExternalReference(ExternalReference ref,
1831 int num_arguments) {
1832 mov(eax, Immediate(num_arguments));
1833 mov(ebx, Immediate(ref));
1840 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1843 // TODO(1236192): Most runtime routines don't need the number of
1844 // arguments passed in because it is constant. At some point we
1845 // should remove this need and make the runtime routine entry code smarter.
1847 Set(eax, Immediate(num_arguments));
1848 JumpToExternalReference(ext);
1852 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1855 TailCallExternalReference(ExternalReference(fid, isolate()),
1861 // If true, a Handle<T> returned by value from a function with cdecl calling
1862 // convention will be returned directly as a value of the location_ field in a handle structure.
1864 // If false, it is returned as a pointer to a memory region preallocated by the
1865 // caller. A pointer to this region should be passed to the function as an
1866 // implicit first argument.
1867 #if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
1868 static const bool kReturnHandlesDirectly = true;
1870 static const bool kReturnHandlesDirectly = false;
1874 Operand ApiParameterOperand(int index) {
1876 esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
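// When handles are not returned directly, stack slot 0 holds the pointer to the
// output slot (set up in PrepareCallApiFunction below), so API parameters start
// one slot higher.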
1880 void MacroAssembler::PrepareCallApiFunction(int argc) {
1881 if (kReturnHandlesDirectly) {
1882 EnterApiExitFrame(argc);
1883 // When handles are returned directly we don't have to allocate extra
1884 // space for and pass an out parameter.
1885 if (emit_debug_code()) {
1886 mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
1889 // We allocate two additional slots: return value and pointer to it.
1890 EnterApiExitFrame(argc + 2);
1892 // The argument slots are filled as follows:
1894 //   n + 1: output slot
//   n: arg n
//   ...
//   1: arg1
1898 //   0: pointer to the output slot
1900 lea(esi, Operand(esp, (argc + 1) * kPointerSize));
1901 mov(Operand(esp, 0 * kPointerSize), esi);
1902 if (emit_debug_code()) {
1903 mov(Operand(esi, 0), Immediate(0));
1909 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
1911 ExternalReference next_address =
1912 ExternalReference::handle_scope_next_address();
1913 ExternalReference limit_address =
1914 ExternalReference::handle_scope_limit_address();
1915 ExternalReference level_address =
1916 ExternalReference::handle_scope_level_address();
1918 // Allocate HandleScope in callee-save registers.
1919 mov(ebx, Operand::StaticVariable(next_address));
1920 mov(edi, Operand::StaticVariable(limit_address));
1921 add(Operand::StaticVariable(level_address), Immediate(1));
1923 if (FLAG_log_timer_events) {
1924 FrameScope frame(this, StackFrame::MANUAL);
1925 PushSafepointRegisters();
1926 PrepareCallCFunction(0, eax);
1927 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 0);
1928 PopSafepointRegisters();
1931 // Call the api function.
1932 call(function_address, RelocInfo::RUNTIME_ENTRY);
1934 if (FLAG_log_timer_events) {
1935 FrameScope frame(this, StackFrame::MANUAL);
1936 PushSafepointRegisters();
1937 PrepareCallCFunction(0, eax);
1938 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 0);
1939 PopSafepointRegisters();
1942 if (!kReturnHandlesDirectly) {
1943 // PrepareCallApiFunction saved pointer to the output slot into
1944 // callee-save register esi.
1945 mov(eax, Operand(esi, 0));
1950 Label promote_scheduled_exception;
1951 Label delete_allocated_handles;
1952 Label leave_exit_frame;
1954 // Check if the result handle holds 0.
1956 j(zero, &empty_handle);
1957 // It was non-zero. Dereference to get the result value.
1958 mov(eax, Operand(eax, 0));
1960 // No more valid handles (the result handle was the last one). Restore
1961 // previous handle scope.
1962 mov(Operand::StaticVariable(next_address), ebx);
1963 sub(Operand::StaticVariable(level_address), Immediate(1));
1964 Assert(above_equal, "Invalid HandleScope level");
1965 cmp(edi, Operand::StaticVariable(limit_address));
1966 j(not_equal, &delete_allocated_handles);
1967 bind(&leave_exit_frame);
1969 // Check if the function scheduled an exception.
1970 ExternalReference scheduled_exception_address =
1971 ExternalReference::scheduled_exception_address(isolate());
1972 cmp(Operand::StaticVariable(scheduled_exception_address),
1973 Immediate(isolate()->factory()->the_hole_value()));
1974 j(not_equal, &promote_scheduled_exception);
1976 #if ENABLE_EXTRA_CHECKS
1977 // Check if the function returned a valid JavaScript value.
1979 Register return_value = eax;
1982 JumpIfSmi(return_value, &ok, Label::kNear);
1983 mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
1985 CmpInstanceType(map, FIRST_NONSTRING_TYPE);
1986 j(below, &ok, Label::kNear);
1988 CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
1989 j(above_equal, &ok, Label::kNear);
1991 cmp(map, isolate()->factory()->heap_number_map());
1992 j(equal, &ok, Label::kNear);
1994 cmp(return_value, isolate()->factory()->undefined_value());
1995 j(equal, &ok, Label::kNear);
1997 cmp(return_value, isolate()->factory()->true_value());
1998 j(equal, &ok, Label::kNear);
2000 cmp(return_value, isolate()->factory()->false_value());
2001 j(equal, &ok, Label::kNear);
2003 cmp(return_value, isolate()->factory()->null_value());
2004 j(equal, &ok, Label::kNear);
2006 Abort("API call returned invalid object");
2011 LeaveApiExitFrame();
2012 ret(stack_space * kPointerSize);
2014 bind(&empty_handle);
2015 // It was zero; the result is undefined.
2016 mov(eax, isolate()->factory()->undefined_value());
2019 bind(&promote_scheduled_exception);
2020 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
2022 // HandleScope limit has changed. Delete allocated extensions.
2023 ExternalReference delete_extensions =
2024 ExternalReference::delete_handle_scope_extensions(isolate());
2025 bind(&delete_allocated_handles);
2026 mov(Operand::StaticVariable(limit_address), edi);
2028 mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
2029 mov(eax, Immediate(delete_extensions));
2032 jmp(&leave_exit_frame);
2036 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2037 // Set the entry point and jump to the C entry runtime stub.
2038 mov(ebx, Immediate(ext));
2040 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
2044 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
2045 // This macro takes the dst register to make the code more readable
2046 // at the call sites. However, the dst register has to be ecx to
2047 // follow the calling convention which requires the call type to be in ecx.
2049 ASSERT(dst.is(ecx));
2050 if (call_kind == CALL_AS_FUNCTION) {
2051 // Set to some non-zero smi by updating the least significant byte.
2053 mov_b(dst, 1 << kSmiTagSize);
2055 // Set to smi zero by clearing the register.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // argument adaptation mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and setup the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


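// Builtin JavaScript functions live on the builtins object, which is reached
// through the global object stored in the current context (esi).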
void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}


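// The native context caches the initial JSArray maps for each ElementsKind in
// the JS_ARRAY_MAPS_INDEX slot. The helper below replaces map_in_out with the
// cached map for transitioned_kind, or jumps to no_map_match if the incoming
// map is not the cached map for expected_kind.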
void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}


void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}


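// Heap objects that may live in new space can move during GC, so instead of
// being embedded in the generated code directly they are referenced
// indirectly through a JSGlobalPropertyCell.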
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    Push(object);
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


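// LoadPowerOf2 materializes the double 2^power in an XMM register by writing
// the biased exponent and shifting it into the exponent field; the mantissa
// bits stay zero, which is exactly the IEEE 754 encoding of a power of two.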
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask = kIsNotStringMask | kStringRepresentationMask
      | kStringEncodingMask | kAsciiDataHintTag;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 8));
  ASSERT_EQ(ASCII_STRING_TYPE, ASCII_STRING_TYPE & kFlatAsciiStringMask);
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  shl(scratch2, 8);
  or_(scratch1, scratch2);
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 8));
  j(not_equal, failure);
}


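// C calls expect esp to be aligned to OS::ActivationFrameAlignment().
// PrepareCallCFunction reserves the argument slots plus one extra word,
// aligns esp downwards, and stashes the original esp in the extra word so
// that CallCFunction can restore it after the call.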
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


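// Page flags live in the MemoryChunk header at the start of each page, so
// they can be reached by masking an object's address with
// ~Page::kPageAlignmentMask.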
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


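// Incremental marking uses two mark bits per object: white is 00, black is 10
// and grey is 11 (see the Marking::k*BitPattern asserts in EnsureNotWhite
// below). HasColor tests both bits, taking care of the case where the bit
// pair straddles a cell boundary in the mark bitmap.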
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


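// GetMarkBits computes, for a given object address, the address of the mark
// bitmap cell on its page (bitmap_reg) and a mask with a single bit set for
// the object's first mark bit (mask_reg). ecx is clobbered as a scratch
// register.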
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}


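// EnsureNotWhite marks white data objects (heap numbers and flat strings)
// black directly and updates the page's live-bytes counter; white objects
// that may contain pointers are instead reported through
// value_is_white_and_not_data so the caller can handle them.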
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl.  May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, FACTORY->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}


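// CheckEnumCache walks the prototype chain, bailing out to call_runtime
// unless every map has a valid enum cache (and an empty one for all objects
// but the receiver) and every object's elements array is empty.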
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32