1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Redistribution and use in source and binary forms, with or without
3 // modification, are permitted provided that the following conditions are met:
6 // * Redistributions of source code must retain the above copyright
7 // notice, this list of conditions and the following disclaimer.
8 // * Redistributions in binary form must reproduce the above
9 // copyright notice, this list of conditions and the following
10 // disclaimer in the documentation and/or other materials provided
11 // with the distribution.
12 // * Neither the name of Google Inc. nor the names of its
13 // contributors may be used to endorse or promote products derived
14 // from this software without specific prior written permission.
16 // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17 // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18 // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19 // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20 // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21 // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22 // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23 // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24 // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25 // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26 // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 #if defined(V8_TARGET_ARCH_IA32)
32 #include "bootstrapper.h"
36 #include "serialize.h"
41 // -------------------------------------------------------------------------
42 // MacroAssembler implementation.
44 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
45 : Assembler(arg_isolate, buffer, size),
46 generating_stub_(false),
47 allow_stub_calls_(true),
49 if (isolate() != NULL) {
50 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
56 void MacroAssembler::InNewSpace(
61 Label::Distance condition_met_distance) {
62 ASSERT(cc == equal || cc == not_equal);
63 if (scratch.is(object)) {
64 and_(scratch, Immediate(~Page::kPageAlignmentMask));
66 mov(scratch, Immediate(~Page::kPageAlignmentMask));
67 and_(scratch, object);
69 // Check that we can use a test_b.
70 ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
71 ASSERT(MemoryChunk::IN_TO_SPACE < 8);
72 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
73 | (1 << MemoryChunk::IN_TO_SPACE);
74 // If non-zero, the page belongs to new-space.
75 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
76 static_cast<uint8_t>(mask));
77 j(cc, condition_met, condition_met_distance);
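// Editorial note (not part of the original source): the ASSERTs above
// guarantee that both space bits fit in the low byte of the page flags word,
// which is what makes the single byte-wide test_b sufficient. A nonzero
// result means the page is in from-space or to-space, i.e. in new space.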
81 void MacroAssembler::RememberedSetHelper(
82 Register object, // Only used for debug checks.
85 SaveFPRegsMode save_fp,
86 MacroAssembler::RememberedSetFinalAction and_then) {
88 if (emit_debug_code()) {
90 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
94 // Load store buffer top.
95 ExternalReference store_buffer =
96 ExternalReference::store_buffer_top(isolate());
97 mov(scratch, Operand::StaticVariable(store_buffer));
98 // Store pointer to buffer.
99 mov(Operand(scratch, 0), addr);
100 // Increment buffer top.
101 add(scratch, Immediate(kPointerSize));
102 // Write back new top of buffer.
103 mov(Operand::StaticVariable(store_buffer), scratch);
104 // Call stub on end of buffer.
105 // Check for end of buffer.
106 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
107 if (and_then == kReturnAtEnd) {
108 Label buffer_overflowed;
109 j(not_equal, &buffer_overflowed, Label::kNear);
111 bind(&buffer_overflowed);
113 ASSERT(and_then == kFallThroughAtEnd);
114 j(equal, &done, Label::kNear);
116 StoreBufferOverflowStub store_buffer_overflow =
117 StoreBufferOverflowStub(save_fp);
118 CallStub(&store_buffer_overflow);
119 if (and_then == kReturnAtEnd) {
122 ASSERT(and_then == kFallThroughAtEnd);
128 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
129 XMMRegister scratch_reg,
130 Register result_reg) {
132 ExternalReference zero_ref = ExternalReference::address_of_zero();
133 movdbl(scratch_reg, Operand::StaticVariable(zero_ref));
134 Set(result_reg, Immediate(0));
135 ucomisd(input_reg, scratch_reg);
136 j(below, &done, Label::kNear);
137 cvtsd2si(result_reg, input_reg);
138 test(result_reg, Immediate(0xFFFFFF00));
139 j(zero, &done, Label::kNear);
140 Set(result_reg, Immediate(255));
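// Editorial note (rough sketch, not part of the original source): the
// sequence above behaves roughly like
//   if (!(value >= 0.0)) result = 0;              // negative or NaN
//   else {
//     int32_t i = cvtsd2si(value);                // rounds per MXCSR
//     result = ((i & ~0xFF) == 0) ? i : 255;      // saturate above 255
//   }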
145 void MacroAssembler::ClampUint8(Register reg) {
147 test(reg, Immediate(0xFFFFFF00));
148 j(zero, &done, Label::kNear);
149 setcc(negative, reg); // 1 if negative, 0 if positive.
150 dec_b(reg); // 0 if negative, 255 if positive.
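// Editorial note (illustrative, not from the original source): the three
// instructions above form a branch-free saturation; in C terms, roughly
//   if ((reg & ~0xFF) != 0) reg = (reg < 0) ? 0 : 255;
// setcc(negative) yields 1 for negative inputs, 0 otherwise, and dec_b turns
// that into 0 or 255 without a conditional jump.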
155 static double kUint32Bias =
156 static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;
159 void MacroAssembler::LoadUint32(XMMRegister dst,
161 XMMRegister scratch) {
163 cmp(src, Immediate(0));
165 Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE));
167 j(not_sign, &done, Label::kNear);
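// Editorial note (sketch, not part of the original source): kUint32Bias is
// 2^32. cvtsi2sd treats src as a signed 32-bit value, so an unsigned input
// with the top bit set comes out as (src - 2^32); adding the bias restores
// the intended value. Roughly:
//   double d = static_cast<double>(static_cast<int32_t>(src));
//   if (static_cast<int32_t>(src) < 0) d += 4294967296.0;  // kUint32Bias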
173 void MacroAssembler::RecordWriteArray(Register object,
176 SaveFPRegsMode save_fp,
177 RememberedSetAction remembered_set_action,
178 SmiCheck smi_check) {
179 // First, check if a write barrier is even needed. The tests below
180 // catch stores of Smis.
183 // Skip barrier if writing a smi.
184 if (smi_check == INLINE_SMI_CHECK) {
185 ASSERT_EQ(0, kSmiTag);
186 test(value, Immediate(kSmiTagMask));
190 // Array access: calculate the destination address in the same manner as
191 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
192 // into an array of words.
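// Editorial note: on ia32 a smi-tagged index already carries a factor of two
// (value << kSmiTagSize), so scaling it by times_half_pointer_size (x2)
// yields value * 4 == value * kPointerSize. The address computed below is
// therefore roughly
//   object + index_value * kPointerSize + FixedArray::kHeaderSize - kHeapObjectTag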
193 Register dst = index;
194 lea(dst, Operand(object, index, times_half_pointer_size,
195 FixedArray::kHeaderSize - kHeapObjectTag));
198 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
202 // Clobber clobbered input registers when running with the debug-code flag
203 // turned on to provoke errors.
204 if (emit_debug_code()) {
205 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
206 mov(index, Immediate(BitCast<int32_t>(kZapValue)));
211 void MacroAssembler::RecordWriteField(
216 SaveFPRegsMode save_fp,
217 RememberedSetAction remembered_set_action,
218 SmiCheck smi_check) {
219 // First, check if a write barrier is even needed. The tests below
220 // catch stores of Smis.
223 // Skip barrier if writing a smi.
224 if (smi_check == INLINE_SMI_CHECK) {
225 JumpIfSmi(value, &done, Label::kNear);
228 // Although the object register is tagged, the offset is relative to the start
229 // of the object, so the offset must be a multiple of kPointerSize.
230 ASSERT(IsAligned(offset, kPointerSize));
232 lea(dst, FieldOperand(object, offset));
233 if (emit_debug_code()) {
235 test_b(dst, (1 << kPointerSizeLog2) - 1);
236 j(zero, &ok, Label::kNear);
242 object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);
246 // Clobber clobbered input registers when running with the debug-code flag
247 // turned on to provoke errors.
248 if (emit_debug_code()) {
249 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
250 mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
255 void MacroAssembler::RecordWriteForMap(
260 SaveFPRegsMode save_fp) {
263 Register address = scratch1;
264 Register value = scratch2;
265 if (emit_debug_code()) {
267 lea(address, FieldOperand(object, HeapObject::kMapOffset));
268 test_b(address, (1 << kPointerSizeLog2) - 1);
269 j(zero, &ok, Label::kNear);
274 ASSERT(!object.is(value));
275 ASSERT(!object.is(address));
276 ASSERT(!value.is(address));
277 if (emit_debug_code()) {
281 if (!FLAG_incremental_marking) {
285 // A single check of the map's page's interesting flag suffices, since it is
286 // only set during incremental collection, and then it's also guaranteed that
287 // the from object's page's interesting flag is also set. This optimization
288 // relies on the fact that maps can never be in new space.
289 ASSERT(!isolate()->heap()->InNewSpace(*map));
290 CheckPageFlagForMap(map,
291 MemoryChunk::kPointersToHereAreInterestingMask,
296 // Delay the initialization of |address| and |value| for the stub until it's
297 // known that they will be needed. Up until this point their values are not
298 // needed since they are embedded in the operands of instructions that need
300 lea(address, FieldOperand(object, HeapObject::kMapOffset));
301 mov(value, Immediate(map));
302 RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
307 // Clobber clobbered input registers when running with the debug-code flag
308 // turned on to provoke errors.
309 if (emit_debug_code()) {
310 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
311 mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
312 mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
317 void MacroAssembler::RecordWrite(Register object,
320 SaveFPRegsMode fp_mode,
321 RememberedSetAction remembered_set_action,
322 SmiCheck smi_check) {
323 ASSERT(!object.is(value));
324 ASSERT(!object.is(address));
325 ASSERT(!value.is(address));
326 if (emit_debug_code()) {
330 if (remembered_set_action == OMIT_REMEMBERED_SET &&
331 !FLAG_incremental_marking) {
335 if (emit_debug_code()) {
337 cmp(value, Operand(address, 0));
338 j(equal, &ok, Label::kNear);
343 // First, check if a write barrier is even needed. The tests below
344 // catch stores of Smis and stores into young gen.
347 if (smi_check == INLINE_SMI_CHECK) {
348 // Skip barrier if writing a smi.
349 JumpIfSmi(value, &done, Label::kNear);
353 value, // Used as scratch.
354 MemoryChunk::kPointersToHereAreInterestingMask,
358 CheckPageFlag(object,
359 value, // Used as scratch.
360 MemoryChunk::kPointersFromHereAreInterestingMask,
365 RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
370 // Clobber clobbered registers when running with the debug-code flag
371 // turned on to provoke errors.
372 if (emit_debug_code()) {
373 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
374 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
379 #ifdef ENABLE_DEBUGGER_SUPPORT
380 void MacroAssembler::DebugBreak() {
381 Set(eax, Immediate(0));
382 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
384 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
389 void MacroAssembler::Set(Register dst, const Immediate& x) {
391 xor_(dst, dst); // Shorter than mov.
398 void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
403 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
404 static const int kMaxImmediateBits = 17;
405 if (x.rmode_ != RelocInfo::NONE) return false;
406 return !is_intn(x.x_, kMaxImmediateBits);
410 void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
411 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
412 Set(dst, Immediate(x.x_ ^ jit_cookie()));
413 xor_(dst, jit_cookie());
420 void MacroAssembler::SafePush(const Immediate& x) {
421 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
422 push(Immediate(x.x_ ^ jit_cookie()));
423 xor_(Operand(esp, 0), Immediate(jit_cookie()));
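// Editorial note: in both SafeSet and SafePush, emitting x ^ jit_cookie() and
// then XOR-ing the destination with the cookie again recovers the original
// immediate, while keeping the raw (potentially attacker-chosen) constant out
// of the generated code stream.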
430 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
431 // see ROOT_ACCESSOR macro in factory.h
432 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
437 void MacroAssembler::CompareRoot(const Operand& with,
438 Heap::RootListIndex index) {
439 // see ROOT_ACCESSOR macro in factory.h
440 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
445 void MacroAssembler::CmpObjectType(Register heap_object,
448 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
449 CmpInstanceType(map, type);
453 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
454 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
455 static_cast<int8_t>(type));
459 void MacroAssembler::CheckFastElements(Register map,
461 Label::Distance distance) {
462 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
463 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
464 STATIC_ASSERT(FAST_ELEMENTS == 2);
465 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
466 cmpb(FieldOperand(map, Map::kBitField2Offset),
467 Map::kMaximumBitField2FastHoleyElementValue);
468 j(above, fail, distance);
472 void MacroAssembler::CheckFastObjectElements(Register map,
474 Label::Distance distance) {
475 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
476 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
477 STATIC_ASSERT(FAST_ELEMENTS == 2);
478 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
479 cmpb(FieldOperand(map, Map::kBitField2Offset),
480 Map::kMaximumBitField2FastHoleySmiElementValue);
481 j(below_equal, fail, distance);
482 cmpb(FieldOperand(map, Map::kBitField2Offset),
483 Map::kMaximumBitField2FastHoleyElementValue);
484 j(above, fail, distance);
488 void MacroAssembler::CheckFastSmiElements(Register map,
490 Label::Distance distance) {
491 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
492 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
493 cmpb(FieldOperand(map, Map::kBitField2Offset),
494 Map::kMaximumBitField2FastHoleySmiElementValue);
495 j(above, fail, distance);
499 void MacroAssembler::StoreNumberToDoubleElements(
500 Register maybe_number,
504 XMMRegister scratch2,
506 bool specialize_for_processor) {
507 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
508 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
510 CheckMap(maybe_number,
511 isolate()->factory()->heap_number_map(),
515 // Double value, canonicalize NaN.
516 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
517 cmp(FieldOperand(maybe_number, offset),
518 Immediate(kNaNOrInfinityLowerBoundUpper32));
519 j(greater_equal, &maybe_nan, Label::kNear);
522 ExternalReference canonical_nan_reference =
523 ExternalReference::address_of_canonical_non_hole_nan();
524 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
525 CpuFeatures::Scope use_sse2(SSE2);
526 movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
527 bind(&have_double_value);
528 movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
531 fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
532 bind(&have_double_value);
533 fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
538 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
539 // it's an Infinity, and the non-NaN code path applies.
540 j(greater, &is_nan, Label::kNear);
541 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
544 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
545 CpuFeatures::Scope use_sse2(SSE2);
546 movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
548 fld_d(Operand::StaticVariable(canonical_nan_reference));
550 jmp(&have_double_value, Label::kNear);
553 // Value is a smi. Convert to a double and store.
554 // Preserve original value.
555 mov(scratch1, maybe_number);
557 if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
558 CpuFeatures::Scope fscope(SSE2);
559 cvtsi2sd(scratch2, scratch1);
560 movdbl(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize),
564 fild_s(Operand(esp, 0));
566 fstp_d(FieldOperand(elements, key, times_4, FixedDoubleArray::kHeaderSize));
572 void MacroAssembler::CompareMap(Register obj,
574 Label* early_success,
575 CompareMapMode mode) {
576 cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
577 if (mode == ALLOW_ELEMENT_TRANSITION_MAPS) {
578 ElementsKind kind = map->elements_kind();
579 if (IsFastElementsKind(kind)) {
580 bool packed = IsFastPackedElementsKind(kind);
581 Map* current_map = *map;
582 while (CanTransitionToMoreGeneralFastElementsKind(kind, packed)) {
583 kind = GetNextMoreGeneralFastElementsKind(kind, packed);
584 current_map = current_map->LookupElementsTransitionMap(kind);
585 if (!current_map) break;
586 j(equal, early_success, Label::kNear);
587 cmp(FieldOperand(obj, HeapObject::kMapOffset),
588 Handle<Map>(current_map));
595 void MacroAssembler::CheckMap(Register obj,
598 SmiCheckType smi_check_type,
599 CompareMapMode mode) {
600 if (smi_check_type == DO_SMI_CHECK) {
601 JumpIfSmi(obj, fail);
605 CompareMap(obj, map, &success, mode);
611 void MacroAssembler::DispatchMap(Register obj,
613 Handle<Code> success,
614 SmiCheckType smi_check_type) {
616 if (smi_check_type == DO_SMI_CHECK) {
617 JumpIfSmi(obj, &fail);
619 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
626 Condition MacroAssembler::IsObjectStringType(Register heap_object,
628 Register instance_type) {
629 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
630 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
631 STATIC_ASSERT(kNotStringTag != 0);
632 test(instance_type, Immediate(kIsNotStringMask));
637 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
641 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
642 IsInstanceJSObjectType(map, scratch, fail);
646 void MacroAssembler::IsInstanceJSObjectType(Register map,
649 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
650 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
652 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
657 void MacroAssembler::FCmp() {
658 if (CpuFeatures::IsSupported(CMOV)) {
671 void MacroAssembler::AbortIfNotNumber(Register object) {
673 JumpIfSmi(object, &ok);
674 cmp(FieldOperand(object, HeapObject::kMapOffset),
675 isolate()->factory()->heap_number_map());
676 Assert(equal, "Operand not a number");
681 void MacroAssembler::AbortIfNotSmi(Register object) {
682 test(object, Immediate(kSmiTagMask));
683 Assert(equal, "Operand is not a smi");
687 void MacroAssembler::AbortIfNotString(Register object) {
688 test(object, Immediate(kSmiTagMask));
689 Assert(not_equal, "Operand is not a string");
691 mov(object, FieldOperand(object, HeapObject::kMapOffset));
692 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
694 Assert(below, "Operand is not a string");
698 void MacroAssembler::AbortIfSmi(Register object) {
699 test(object, Immediate(kSmiTagMask));
700 Assert(not_equal, "Operand is a smi");
704 void MacroAssembler::EnterFrame(StackFrame::Type type) {
708 push(Immediate(Smi::FromInt(type)));
709 push(Immediate(CodeObject()));
710 if (emit_debug_code()) {
711 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
712 Check(not_equal, "code object not properly patched");
717 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
718 if (emit_debug_code()) {
719 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
720 Immediate(Smi::FromInt(type)));
721 Check(equal, "stack frame types must match");
727 void MacroAssembler::EnterExitFramePrologue() {
728 // Set up the frame structure on the stack.
729 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
730 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
731 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
735 // Reserve room for entry stack pointer and push the code object.
736 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
737 push(Immediate(0)); // Saved entry sp, patched before call.
738 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
740 // Save the frame pointer and the context in top.
741 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
743 ExternalReference context_address(Isolate::kContextAddress,
745 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
746 mov(Operand::StaticVariable(context_address), esi);
750 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
751 // Optionally save all XMM registers.
753 CpuFeatures::Scope scope(SSE2);
754 int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
755 sub(esp, Immediate(space));
756 const int offset = -2 * kPointerSize;
757 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
758 XMMRegister reg = XMMRegister::from_code(i);
759 movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
762 sub(esp, Immediate(argc * kPointerSize));
765 // Get the required frame alignment for the OS.
766 const int kFrameAlignment = OS::ActivationFrameAlignment();
767 if (kFrameAlignment > 0) {
768 ASSERT(IsPowerOf2(kFrameAlignment));
769 and_(esp, -kFrameAlignment);
772 // Patch the saved entry sp.
773 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
777 void MacroAssembler::EnterExitFrame(bool save_doubles) {
778 EnterExitFramePrologue();
780 // Set up argc and argv in callee-saved registers.
781 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
783 lea(esi, Operand(ebp, eax, times_4, offset));
785 // Reserve space for argc, argv and isolate.
786 EnterExitFrameEpilogue(3, save_doubles);
790 void MacroAssembler::EnterApiExitFrame(int argc) {
791 EnterExitFramePrologue();
792 EnterExitFrameEpilogue(argc, false);
796 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
797 // Optionally restore all XMM registers.
799 CpuFeatures::Scope scope(SSE2);
800 const int offset = -2 * kPointerSize;
801 for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
802 XMMRegister reg = XMMRegister::from_code(i);
803 movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
807 // Get the return address from the stack and restore the frame pointer.
808 mov(ecx, Operand(ebp, 1 * kPointerSize));
809 mov(ebp, Operand(ebp, 0 * kPointerSize));
811 // Pop the arguments and the receiver from the caller stack.
812 lea(esp, Operand(esi, 1 * kPointerSize));
814 // Push the return address to get ready to return.
817 LeaveExitFrameEpilogue();
820 void MacroAssembler::LeaveExitFrameEpilogue() {
821 // Restore current context from top and clear it in debug mode.
822 ExternalReference context_address(Isolate::kContextAddress, isolate());
823 mov(esi, Operand::StaticVariable(context_address));
825 mov(Operand::StaticVariable(context_address), Immediate(0));
828 // Clear the top frame.
829 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
831 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
835 void MacroAssembler::LeaveApiExitFrame() {
839 LeaveExitFrameEpilogue();
843 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
845 // Adjust this code if not the case.
846 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
847 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
848 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
849 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
850 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
851 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
853 // We will build up the handler from the bottom by pushing on the stack.
854 // First push the frame pointer and context.
855 if (kind == StackHandler::JS_ENTRY) {
856 // The frame pointer does not point to a JS frame so we save NULL for
857 // ebp. We expect the code throwing an exception to check ebp before
858 // dereferencing it to restore the context.
859 push(Immediate(0)); // NULL frame pointer.
860 push(Immediate(Smi::FromInt(0))); // No context.
865 // Push the state and the code object.
867 StackHandler::IndexField::encode(handler_index) |
868 StackHandler::KindField::encode(kind);
869 push(Immediate(state));
872 // Link the current handler as the next handler.
873 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
874 push(Operand::StaticVariable(handler_address));
875 // Set this new handler as the current one.
876 mov(Operand::StaticVariable(handler_address), esp);
880 void MacroAssembler::PopTryHandler() {
881 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
882 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
883 pop(Operand::StaticVariable(handler_address));
884 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
888 void MacroAssembler::JumpToHandlerEntry() {
889 // Compute the handler entry address and jump to it. The handler table is
890 // a fixed array of (smi-tagged) code offsets.
891 // eax = exception, edi = code object, edx = state.
892 mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
893 shr(edx, StackHandler::kKindWidth);
894 mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
896 lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
901 void MacroAssembler::Throw(Register value) {
902 // Adjust this code if not the case.
903 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
904 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
905 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
906 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
907 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
908 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
910 // The exception is expected in eax.
911 if (!value.is(eax)) {
914 // Drop the stack pointer to the top of the top handler.
915 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
916 mov(esp, Operand::StaticVariable(handler_address));
917 // Restore the next handler.
918 pop(Operand::StaticVariable(handler_address));
920 // Remove the code object and state, compute the handler address in edi.
921 pop(edi); // Code object.
922 pop(edx); // Index and state.
924 // Restore the context and frame pointer.
925 pop(esi); // Context.
926 pop(ebp); // Frame pointer.
928 // If the handler is a JS frame, restore the context to the frame.
929 // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either ebp or esi.
933 j(zero, &skip, Label::kNear);
934 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
937 JumpToHandlerEntry();
941 void MacroAssembler::ThrowUncatchable(Register value) {
942 // Adjust this code if not the case.
943 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
944 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
945 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
946 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
947 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
948 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
950 // The exception is expected in eax.
951 if (!value.is(eax)) {
954 // Drop the stack pointer to the top of the top stack handler.
955 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
956 mov(esp, Operand::StaticVariable(handler_address));
958 // Unwind the handlers until the top ENTRY handler is found.
959 Label fetch_next, check_kind;
960 jmp(&check_kind, Label::kNear);
962 mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
965 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
966 test(Operand(esp, StackHandlerConstants::kStateOffset),
967 Immediate(StackHandler::KindField::kMask));
968 j(not_zero, &fetch_next);
970 // Set the top handler address to next handler past the top ENTRY handler.
971 pop(Operand::StaticVariable(handler_address));
973 // Remove the code object and state, compute the handler address in edi.
974 pop(edi); // Code object.
975 pop(edx); // Index and state.
977 // Clear the context pointer and frame pointer (0 was saved in the handler).
981 JumpToHandlerEntry();
985 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
990 ASSERT(!holder_reg.is(scratch));
992 // Load current lexical context from the stack frame.
993 mov(scratch, Operand(ebp, StandardFrameConstants::kContextOffset));
995 // When generating debug code, make sure the lexical context is set.
996 if (emit_debug_code()) {
997 cmp(scratch, Immediate(0));
998 Check(not_equal, "we should not have an empty lexical context");
1000 // Load the native context of the current context.
1002 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1003 mov(scratch, FieldOperand(scratch, offset));
1004 mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
1006 // Check the context is a native context.
1007 if (emit_debug_code()) {
1009 // Read the first word and compare to native_context_map.
1010 mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
1011 cmp(scratch, isolate()->factory()->native_context_map());
1012 Check(equal, "JSGlobalObject::native_context should be a native context.");
1016 // Check if both contexts are the same.
1017 cmp(scratch, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1018 j(equal, &same_contexts);
1020 // Compare security tokens, save holder_reg on the stack so we can use it
1021 // as a temporary register.
1023 // TODO(119): avoid push(holder_reg)/pop(holder_reg)
1025 // Check that the security token in the calling global object is
1026 // compatible with the security token in the receiving global
1029 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1031 // Check the context is a native context.
1032 if (emit_debug_code()) {
1033 cmp(holder_reg, isolate()->factory()->null_value());
1034 Check(not_equal, "JSGlobalProxy::context() should not be null.");
1037 // Read the first word and compare to native_context_map().
1038 mov(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
1039 cmp(holder_reg, isolate()->factory()->native_context_map());
1040 Check(equal, "JSGlobalObject::native_context should be a native context.");
1044 int token_offset = Context::kHeaderSize +
1045 Context::SECURITY_TOKEN_INDEX * kPointerSize;
1046 mov(scratch, FieldOperand(scratch, token_offset));
1047 cmp(scratch, FieldOperand(holder_reg, token_offset));
1051 bind(&same_contexts);
1055 // Compute the hash code from the untagged key. This must be kept in sync
1056 // with ComputeIntegerHash in utils.h.
1058 // Note: r0 will contain hash code
1059 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1060 // Xor original key with a seed.
1061 if (Serializer::enabled()) {
1062 ExternalReference roots_array_start =
1063 ExternalReference::roots_array_start(isolate());
1064 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1066 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
1070 int32_t seed = isolate()->heap()->HashSeed();
1071 xor_(r0, Immediate(seed));
1074 // hash = ~hash + (hash << 15);
1079 // hash = hash ^ (hash >> 12);
1083 // hash = hash + (hash << 2);
1084 lea(r0, Operand(r0, r0, times_4, 0));
1085 // hash = hash ^ (hash >> 4);
1089 // hash = hash * 2057;
1091 // hash = hash ^ (hash >> 16);
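// Editorial note: collected together, the steps documented above mirror
// ComputeIntegerHash in utils.h; in plain C form (seed handling as above):
//   uint32_t hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash ^= hash >> 12;
//   hash += hash << 2;
//   hash ^= hash >> 4;
//   hash *= 2057;
//   hash ^= hash >> 16;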
1099 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1108 // elements - holds the slow-case elements of the receiver and is unchanged.
1110 // key - holds the smi key on entry and is unchanged.
1112 // Scratch registers:
1114 // r0 - holds the untagged key on entry and holds the hash once computed.
1116 // r1 - used to hold the capacity mask of the dictionary
1118 // r2 - used for the index into the dictionary.
1120 // result - holds the result on exit if the load succeeds and we fall through.
1124 GetNumberHash(r0, r1);
1126 // Compute capacity mask.
1127 mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1128 shr(r1, kSmiTagSize); // convert smi to int
1131 // Generate an unrolled loop that performs a few probes before giving up.
1132 const int kProbes = 4;
1133 for (int i = 0; i < kProbes; i++) {
1134 // Use r2 for index calculations and keep the hash intact in r0.
1136 // Compute the masked index: (hash + i + i * i) & mask.
1138 add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
1142 // Scale the index by multiplying by the entry size.
1143 ASSERT(SeededNumberDictionary::kEntrySize == 3);
1144 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1146 // Check if the key matches.
1147 cmp(key, FieldOperand(elements,
1150 SeededNumberDictionary::kElementsStartOffset));
1151 if (i != (kProbes - 1)) {
1159 // Check that the value is a normal property.
1160 const int kDetailsOffset =
1161 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1162 ASSERT_EQ(NORMAL, 0);
1163 test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1164 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1167 // Get the value at the masked, scaled index.
1168 const int kValueOffset =
1169 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1170 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
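// Editorial note (sketch, based on the comments above; not part of the
// original source): the unrolled loop implements open-addressed probing with
// three-word entries and a power-of-two capacity, roughly
//   for (int i = 0; i < kProbes; i++) {
//     int index = (hash + i + i * i) & capacity_mask;  // masked index
//     int entry = index * 3;                           // 3 words per entry
//     if (dictionary_key(entry) == key) break;         // hit
//   }
//   // afterwards: check the details word, then load the value word.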
1174 void MacroAssembler::LoadAllocationTopHelper(Register result,
1176 AllocationFlags flags) {
1177 ExternalReference new_space_allocation_top =
1178 ExternalReference::new_space_allocation_top_address(isolate());
1180 // Just return if allocation top is already known.
1181 if ((flags & RESULT_CONTAINS_TOP) != 0) {
1182 // No use of scratch if allocation top is provided.
1183 ASSERT(scratch.is(no_reg));
1185 // Assert that result actually contains top on entry.
1186 cmp(result, Operand::StaticVariable(new_space_allocation_top));
1187 Check(equal, "Unexpected allocation top");
1192 // Move address of new object to result. Use scratch register if available.
1193 if (scratch.is(no_reg)) {
1194 mov(result, Operand::StaticVariable(new_space_allocation_top));
1196 mov(scratch, Immediate(new_space_allocation_top));
1197 mov(result, Operand(scratch, 0));
1202 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1204 if (emit_debug_code()) {
1205 test(result_end, Immediate(kObjectAlignmentMask));
1206 Check(zero, "Unaligned allocation in new space");
1209 ExternalReference new_space_allocation_top =
1210 ExternalReference::new_space_allocation_top_address(isolate());
1212 // Update new top. Use scratch if available.
1213 if (scratch.is(no_reg)) {
1214 mov(Operand::StaticVariable(new_space_allocation_top), result_end);
1216 mov(Operand(scratch, 0), result_end);
1221 void MacroAssembler::AllocateInNewSpace(int object_size,
1223 Register result_end,
1226 AllocationFlags flags) {
1227 if (!FLAG_inline_new) {
1228 if (emit_debug_code()) {
1229 // Trash the registers to simulate an allocation failure.
1230 mov(result, Immediate(0x7091));
1231 if (result_end.is_valid()) {
1232 mov(result_end, Immediate(0x7191));
1234 if (scratch.is_valid()) {
1235 mov(scratch, Immediate(0x7291));
1241 ASSERT(!result.is(result_end));
1243 // Load address of new object into result.
1244 LoadAllocationTopHelper(result, scratch, flags);
1246 Register top_reg = result_end.is_valid() ? result_end : result;
1248 // Calculate new top and bail out if new space is exhausted.
1249 ExternalReference new_space_allocation_limit =
1250 ExternalReference::new_space_allocation_limit_address(isolate());
1252 if (!top_reg.is(result)) {
1253 mov(top_reg, result);
1255 add(top_reg, Immediate(object_size));
1256 j(carry, gc_required);
1257 cmp(top_reg, Operand::StaticVariable(new_space_allocation_limit));
1258 j(above, gc_required);
1260 // Update allocation top.
1261 UpdateAllocationTopHelper(top_reg, scratch);
1263 // Tag result if requested.
1264 if (top_reg.is(result)) {
1265 if ((flags & TAG_OBJECT) != 0) {
1266 sub(result, Immediate(object_size - kHeapObjectTag));
1268 sub(result, Immediate(object_size));
1270 } else if ((flags & TAG_OBJECT) != 0) {
1271 add(result, Immediate(kHeapObjectTag));
1276 void MacroAssembler::AllocateInNewSpace(int header_size,
1277 ScaleFactor element_size,
1278 Register element_count,
1280 Register result_end,
1283 AllocationFlags flags) {
1284 if (!FLAG_inline_new) {
1285 if (emit_debug_code()) {
1286 // Trash the registers to simulate an allocation failure.
1287 mov(result, Immediate(0x7091));
1288 mov(result_end, Immediate(0x7191));
1289 if (scratch.is_valid()) {
1290 mov(scratch, Immediate(0x7291));
1292 // Register element_count is not modified by the function.
1297 ASSERT(!result.is(result_end));
1299 // Load address of new object into result.
1300 LoadAllocationTopHelper(result, scratch, flags);
1302 // Calculate new top and bail out if new space is exhausted.
1303 ExternalReference new_space_allocation_limit =
1304 ExternalReference::new_space_allocation_limit_address(isolate());
1306 // We assume that element_count*element_size + header_size does not overflow.
1308 lea(result_end, Operand(element_count, element_size, header_size));
1309 add(result_end, result);
1310 j(carry, gc_required);
1311 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1312 j(above, gc_required);
1314 // Tag result if requested.
1315 if ((flags & TAG_OBJECT) != 0) {
1316 lea(result, Operand(result, kHeapObjectTag));
1319 // Update allocation top.
1320 UpdateAllocationTopHelper(result_end, scratch);
1324 void MacroAssembler::AllocateInNewSpace(Register object_size,
1326 Register result_end,
1329 AllocationFlags flags) {
1330 if (!FLAG_inline_new) {
1331 if (emit_debug_code()) {
1332 // Trash the registers to simulate an allocation failure.
1333 mov(result, Immediate(0x7091));
1334 mov(result_end, Immediate(0x7191));
1335 if (scratch.is_valid()) {
1336 mov(scratch, Immediate(0x7291));
1338 // object_size is left unchanged by this function.
1343 ASSERT(!result.is(result_end));
1345 // Load address of new object into result.
1346 LoadAllocationTopHelper(result, scratch, flags);
1348 // Calculate new top and bail out if new space is exhausted.
1349 ExternalReference new_space_allocation_limit =
1350 ExternalReference::new_space_allocation_limit_address(isolate());
1351 if (!object_size.is(result_end)) {
1352 mov(result_end, object_size);
1354 add(result_end, result);
1355 j(carry, gc_required);
1356 cmp(result_end, Operand::StaticVariable(new_space_allocation_limit));
1357 j(above, gc_required);
1359 // Tag result if requested.
1360 if ((flags & TAG_OBJECT) != 0) {
1361 lea(result, Operand(result, kHeapObjectTag));
1364 // Update allocation top.
1365 UpdateAllocationTopHelper(result_end, scratch);
1369 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1370 ExternalReference new_space_allocation_top =
1371 ExternalReference::new_space_allocation_top_address(isolate());
1373 // Make sure the object has no tag before resetting top.
1374 and_(object, Immediate(~kHeapObjectTagMask));
1376 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1377 Check(below, "Undo allocation of non allocated memory");
1379 mov(Operand::StaticVariable(new_space_allocation_top), object);
1383 void MacroAssembler::AllocateHeapNumber(Register result,
1386 Label* gc_required) {
1387 // Allocate heap number in new space.
1388 AllocateInNewSpace(HeapNumber::kSize,
1396 mov(FieldOperand(result, HeapObject::kMapOffset),
1397 Immediate(isolate()->factory()->heap_number_map()));
1401 void MacroAssembler::AllocateTwoByteString(Register result,
1406 Label* gc_required) {
1407 // Calculate the number of bytes needed for the characters in the string while
1408 // observing object alignment.
1409 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1410 ASSERT(kShortSize == 2);
1411 // scratch1 = length * 2 + kObjectAlignmentMask.
1412 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1413 and_(scratch1, Immediate(~kObjectAlignmentMask));
1415 // Allocate two byte string in new space.
1416 AllocateInNewSpace(SeqTwoByteString::kHeaderSize,
1425 // Set the map, length and hash field.
1426 mov(FieldOperand(result, HeapObject::kMapOffset),
1427 Immediate(isolate()->factory()->string_map()));
1428 mov(scratch1, length);
1430 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1431 mov(FieldOperand(result, String::kHashFieldOffset),
1432 Immediate(String::kEmptyHashField));
1436 void MacroAssembler::AllocateAsciiString(Register result,
1441 Label* gc_required) {
1442 // Calculate the number of bytes needed for the characters in the string while
1443 // observing object alignment.
1444 ASSERT((SeqAsciiString::kHeaderSize & kObjectAlignmentMask) == 0);
1445 mov(scratch1, length);
1446 ASSERT(kCharSize == 1);
1447 add(scratch1, Immediate(kObjectAlignmentMask));
1448 and_(scratch1, Immediate(~kObjectAlignmentMask));
1450 // Allocate ASCII string in new space.
1451 AllocateInNewSpace(SeqAsciiString::kHeaderSize,
1460 // Set the map, length and hash field.
1461 mov(FieldOperand(result, HeapObject::kMapOffset),
1462 Immediate(isolate()->factory()->ascii_string_map()));
1463 mov(scratch1, length);
1465 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1466 mov(FieldOperand(result, String::kHashFieldOffset),
1467 Immediate(String::kEmptyHashField));
1471 void MacroAssembler::AllocateAsciiString(Register result,
1475 Label* gc_required) {
1478 // Allocate ASCII string in new space.
1479 AllocateInNewSpace(SeqAsciiString::SizeFor(length),
1486 // Set the map, length and hash field.
1487 mov(FieldOperand(result, HeapObject::kMapOffset),
1488 Immediate(isolate()->factory()->ascii_string_map()));
1489 mov(FieldOperand(result, String::kLengthOffset),
1490 Immediate(Smi::FromInt(length)));
1491 mov(FieldOperand(result, String::kHashFieldOffset),
1492 Immediate(String::kEmptyHashField));
1496 void MacroAssembler::AllocateTwoByteConsString(Register result,
1499 Label* gc_required) {
1500 // Allocate heap number in new space.
1501 AllocateInNewSpace(ConsString::kSize,
1508 // Set the map. The other fields are left uninitialized.
1509 mov(FieldOperand(result, HeapObject::kMapOffset),
1510 Immediate(isolate()->factory()->cons_string_map()));
1514 void MacroAssembler::AllocateAsciiConsString(Register result,
1517 Label* gc_required) {
1518 // Allocate heap number in new space.
1519 AllocateInNewSpace(ConsString::kSize,
1526 // Set the map. The other fields are left uninitialized.
1527 mov(FieldOperand(result, HeapObject::kMapOffset),
1528 Immediate(isolate()->factory()->cons_ascii_string_map()));
1532 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1535 Label* gc_required) {
1536 // Allocate heap number in new space.
1537 AllocateInNewSpace(SlicedString::kSize,
1544 // Set the map. The other fields are left uninitialized.
1545 mov(FieldOperand(result, HeapObject::kMapOffset),
1546 Immediate(isolate()->factory()->sliced_string_map()));
1550 void MacroAssembler::AllocateAsciiSlicedString(Register result,
1553 Label* gc_required) {
1554 // Allocate heap number in new space.
1555 AllocateInNewSpace(SlicedString::kSize,
1562 // Set the map. The other fields are left uninitialized.
1563 mov(FieldOperand(result, HeapObject::kMapOffset),
1564 Immediate(isolate()->factory()->sliced_ascii_string_map()));
1568 // Copy memory, byte-by-byte, from source to destination. Not optimized for
1569 // long or aligned copies. The contents of scratch and length are destroyed.
1570 // Source and destination are incremented by length.
1571 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1572 // have been tried here already, and this is fastest.
1573 // A simpler loop is faster on small copies, but 30% slower on large ones.
1574 // The cld() instruction must have been emitted, to set the direction flag,
1575 // before calling this function.
1576 void MacroAssembler::CopyBytes(Register source,
1577 Register destination,
1580 Label loop, done, short_string, short_loop;
1581 // Experimentation shows that the short string loop is faster if length < 10.
1582 cmp(length, Immediate(10));
1583 j(less_equal, &short_string);
1585 ASSERT(source.is(esi));
1586 ASSERT(destination.is(edi));
1587 ASSERT(length.is(ecx));
1589 // Because source is 4-byte aligned in our uses of this function,
1590 // we keep source aligned for the rep_movs call by copying the odd bytes
1591 // at the end of the ranges.
1592 mov(scratch, Operand(source, length, times_1, -4));
1593 mov(Operand(destination, length, times_1, -4), scratch);
1597 and_(scratch, Immediate(0x3));
1598 add(destination, scratch);
1601 bind(&short_string);
1602 test(length, length);
1606 mov_b(scratch, Operand(source, 0));
1607 mov_b(Operand(destination, 0), scratch);
1611 j(not_zero, &short_loop);
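// Editorial note (rough sketch of the fast path above, assuming length >= 10
// and a 4-byte-aligned source, as the header comment states):
//   memcpy(dst + len - 4, src + len - 4, 4);  // copy the tail first (covers len % 4)
//   // then rep-movs len / 4 aligned words from the front,
//   // and finally advance src/dst past the len % 4 leftover bytes.
// Shorter strings fall through to the simple byte-at-a-time loop.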
1617 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1618 Register end_offset,
1623 mov(Operand(start_offset, 0), filler);
1624 add(start_offset, Immediate(kPointerSize));
1626 cmp(start_offset, end_offset);
1631 void MacroAssembler::BooleanBitTest(Register object,
1634 bit_index += kSmiTagSize + kSmiShiftSize;
1635 ASSERT(IsPowerOf2(kBitsPerByte));
1636 int byte_index = bit_index / kBitsPerByte;
1637 int byte_bit_index = bit_index & (kBitsPerByte - 1);
1638 test_b(FieldOperand(object, field_offset + byte_index),
1639 static_cast<byte>(1 << byte_bit_index));
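// Editorial note: the field holds a smi, so the interesting bit sits
// kSmiTagSize + kSmiShiftSize positions above its logical index. Worked
// example on ia32 (kSmiTagSize == 1, kSmiShiftSize == 0): logical bit 9
// becomes bit 10 overall, so byte_index == 1 and byte_bit_index == 2, and
// the test reads the byte at field_offset + 1 with mask 1 << 2.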
1644 void MacroAssembler::NegativeZeroTest(Register result,
1646 Label* then_label) {
1648 test(result, result);
1651 j(sign, then_label);
1656 void MacroAssembler::NegativeZeroTest(Register result,
1660 Label* then_label) {
1662 test(result, result);
1666 j(sign, then_label);
1671 void MacroAssembler::TryGetFunctionPrototype(Register function,
1675 bool miss_on_bound_function) {
1676 // Check that the receiver isn't a smi.
1677 JumpIfSmi(function, miss);
1679 // Check that the function really is a function.
1680 CmpObjectType(function, JS_FUNCTION_TYPE, result);
1683 if (miss_on_bound_function) {
1684 // If a bound function, go to miss label.
1686 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
1687 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
1688 SharedFunctionInfo::kBoundFunction);
1692 // Make sure that the function has an instance prototype.
1694 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
1695 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
1696 j(not_zero, &non_instance);
1698 // Get the prototype or initial map from the function.
1700 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
1702 // If the prototype or initial map is the hole, don't return it and
1703 // simply miss the cache instead. This will allow us to allocate a
1704 // prototype object on-demand in the runtime system.
1705 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
1708 // If the function does not have an initial map, we're done.
1710 CmpObjectType(result, MAP_TYPE, scratch);
1711 j(not_equal, &done);
1713 // Get the prototype from the initial map.
1714 mov(result, FieldOperand(result, Map::kPrototypeOffset));
1717 // Non-instance prototype: Fetch prototype from constructor field
1719 bind(&non_instance);
1720 mov(result, FieldOperand(result, Map::kConstructorOffset));
1727 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
1728 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
1729 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
1733 void MacroAssembler::TailCallStub(CodeStub* stub) {
1734 ASSERT(allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe());
1735 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
1739 void MacroAssembler::StubReturn(int argc) {
1740 ASSERT(argc >= 1 && generating_stub());
1741 ret((argc - 1) * kPointerSize);
1745 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1746 if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
1747 return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe();
1751 void MacroAssembler::IllegalOperation(int num_arguments) {
1752 if (num_arguments > 0) {
1753 add(esp, Immediate(num_arguments * kPointerSize));
1755 mov(eax, Immediate(isolate()->factory()->undefined_value()));
1759 void MacroAssembler::IndexFromHash(Register hash, Register index) {
1760 // The assert checks that the constants for the maximum number of digits
1761 // for an array index cached in the hash field and the number of bits
1762 // reserved for it do not conflict.
1763 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
1764 (1 << String::kArrayIndexValueBits));
1765 // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
1766 // the low kHashShift bits.
1767 and_(hash, String::kArrayIndexValueMask);
1768 STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
1769 if (String::kHashShift > kSmiTagSize) {
1770 shr(hash, String::kHashShift - kSmiTagSize);
1772 if (!index.is(hash)) {
1778 void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
1779 CallRuntime(Runtime::FunctionForId(id), num_arguments);
1783 void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
1784 const Runtime::Function* function = Runtime::FunctionForId(id);
1785 Set(eax, Immediate(function->nargs));
1786 mov(ebx, Immediate(ExternalReference(function, isolate())));
1787 CEntryStub ces(1, kSaveFPRegs);
1792 void MacroAssembler::CallRuntime(const Runtime::Function* f,
1793 int num_arguments) {
1794 // If the expected number of arguments of the runtime function is
1795 // constant, we check that the actual number of arguments matches the expectation.
1797 if (f->nargs >= 0 && f->nargs != num_arguments) {
1798 IllegalOperation(num_arguments);
1802 // TODO(1236192): Most runtime routines don't need the number of
1803 // arguments passed in because it is constant. At some point we
1804 // should remove this need and make the runtime routine entry code smarter.
1806 Set(eax, Immediate(num_arguments));
1807 mov(ebx, Immediate(ExternalReference(f, isolate())));
1813 void MacroAssembler::CallExternalReference(ExternalReference ref,
1814 int num_arguments) {
1815 mov(eax, Immediate(num_arguments));
1816 mov(ebx, Immediate(ref));
1823 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
1826 // TODO(1236192): Most runtime routines don't need the number of
1827 // arguments passed in because it is constant. At some point we
1828 // should remove this need and make the runtime routine entry code smarter.
1830 Set(eax, Immediate(num_arguments));
1831 JumpToExternalReference(ext);
1835 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
1838 TailCallExternalReference(ExternalReference(fid, isolate()),
1844 // If true, a Handle<T> returned by value from a function with cdecl calling
1845 // convention will be returned directly as the value of the location_ field in a
1847 // handle. If false, it is returned as a pointer to a memory region preallocated
1848 // by the caller. A pointer to this region should be passed to the function as an
1849 // implicit first argument.
1850 #if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
1851 static const bool kReturnHandlesDirectly = true;
1853 static const bool kReturnHandlesDirectly = false;
1857 Operand ApiParameterOperand(int index) {
1859 esp, (index + (kReturnHandlesDirectly ? 0 : 1)) * kPointerSize);
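// Editorial note: when handles are not returned directly, stack slot 0 is
// reserved for the pointer to the preallocated return value (see the comment
// block above), so every API argument shifts up by one slot; hence the "+ 1".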
1863 void MacroAssembler::PrepareCallApiFunction(int argc) {
1864 if (kReturnHandlesDirectly) {
1865 EnterApiExitFrame(argc);
1866 // When handles are returned directly we don't have to allocate extra
1867 // space for and pass an out parameter.
1868 if (emit_debug_code()) {
1869 mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
1872 // We allocate two additional slots: return value and pointer to it.
1873 EnterApiExitFrame(argc + 2);
1875 // The argument slots are filled as follows:
1877 // n + 1: output slot
1881 // 0: pointer to the output slot
1883 lea(esi, Operand(esp, (argc + 1) * kPointerSize));
1884 mov(Operand(esp, 0 * kPointerSize), esi);
1885 if (emit_debug_code()) {
1886 mov(Operand(esi, 0), Immediate(0));
1892 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
1894 ExternalReference next_address =
1895 ExternalReference::handle_scope_next_address();
1896 ExternalReference limit_address =
1897 ExternalReference::handle_scope_limit_address();
1898 ExternalReference level_address =
1899 ExternalReference::handle_scope_level_address();
1901 // Allocate HandleScope in callee-save registers.
1902 mov(ebx, Operand::StaticVariable(next_address));
1903 mov(edi, Operand::StaticVariable(limit_address));
1904 add(Operand::StaticVariable(level_address), Immediate(1));
1906 // Call the api function.
1907 call(function_address, RelocInfo::RUNTIME_ENTRY);
1909 if (!kReturnHandlesDirectly) {
1910 // PrepareCallApiFunction saved pointer to the output slot into
1911 // callee-save register esi.
1912 mov(eax, Operand(esi, 0));
1917 Label promote_scheduled_exception;
1918 Label delete_allocated_handles;
1919 Label leave_exit_frame;
1921 // Check if the result handle holds 0.
1923 j(zero, &empty_handle);
1924 // It was non-zero. Dereference to get the result value.
1925 mov(eax, Operand(eax, 0));
1927 // No more valid handles (the result handle was the last one). Restore
1928 // previous handle scope.
1929 mov(Operand::StaticVariable(next_address), ebx);
1930 sub(Operand::StaticVariable(level_address), Immediate(1));
1931 Assert(above_equal, "Invalid HandleScope level");
1932 cmp(edi, Operand::StaticVariable(limit_address));
1933 j(not_equal, &delete_allocated_handles);
1934 bind(&leave_exit_frame);
1936 // Check if the function scheduled an exception.
1937 ExternalReference scheduled_exception_address =
1938 ExternalReference::scheduled_exception_address(isolate());
1939 cmp(Operand::StaticVariable(scheduled_exception_address),
1940 Immediate(isolate()->factory()->the_hole_value()));
1941 j(not_equal, &promote_scheduled_exception);
1943 #if ENABLE_EXTRA_CHECKS
1944 // Check if the function returned a valid JavaScript value.
1946 Register return_value = eax;
1949 JumpIfSmi(return_value, &ok, Label::kNear);
1950 mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
1952 CmpInstanceType(map, FIRST_NONSTRING_TYPE);
1953 j(below, &ok, Label::kNear);
1955 CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
1956 j(above_equal, &ok, Label::kNear);
1958 cmp(map, isolate()->factory()->heap_number_map());
1959 j(equal, &ok, Label::kNear);
1961 cmp(return_value, isolate()->factory()->undefined_value());
1962 j(equal, &ok, Label::kNear);
1964 cmp(return_value, isolate()->factory()->true_value());
1965 j(equal, &ok, Label::kNear);
1967 cmp(return_value, isolate()->factory()->false_value());
1968 j(equal, &ok, Label::kNear);
1970 cmp(return_value, isolate()->factory()->null_value());
1971 j(equal, &ok, Label::kNear);
1973 Abort("API call returned invalid object");
1978 LeaveApiExitFrame();
1979 ret(stack_space * kPointerSize);
1981 bind(&empty_handle);
1982 // It was zero; the result is undefined.
1983 mov(eax, isolate()->factory()->undefined_value());
1986 bind(&promote_scheduled_exception);
1987 TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);
1989 // HandleScope limit has changed. Delete allocated extensions.
1990 ExternalReference delete_extensions =
1991 ExternalReference::delete_handle_scope_extensions(isolate());
1992 bind(&delete_allocated_handles);
1993 mov(Operand::StaticVariable(limit_address), edi);
1995 mov(Operand(esp, 0), Immediate(ExternalReference::isolate_address()));
1996 mov(eax, Immediate(delete_extensions));
1999 jmp(&leave_exit_frame);
2003 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2004 // Set the entry point and jump to the C entry runtime stub.
2005 mov(ebx, Immediate(ext));
2007 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
2011 void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
2012 // This macro takes the dst register to make the code more readable
2013 // at the call sites. However, the dst register has to be ecx to
2014 // follow the calling convention, which requires the call type to be in ecx.
2016 ASSERT(dst.is(ecx));
2017 if (call_kind == CALL_AS_FUNCTION) {
2018 // Set to some non-zero smi by updating the least significant
2020 mov_b(dst, 1 << kSmiTagSize);
2022 // Set to smi zero by clearing the register.
2028 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2029 const ParameterCount& actual,
2030 Handle<Code> code_constant,
2031 const Operand& code_operand,
2033 bool* definitely_mismatches,
2035 Label::Distance done_near,
2036 const CallWrapper& call_wrapper,
2037 CallKind call_kind) {
2038 bool definitely_matches = false;
2039 *definitely_mismatches = false;
2041 if (expected.is_immediate()) {
2042 ASSERT(actual.is_immediate());
2043 if (expected.immediate() == actual.immediate()) {
2044 definitely_matches = true;
2046 mov(eax, actual.immediate());
2047 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2048 if (expected.immediate() == sentinel) {
2049 // Don't worry about adapting arguments for builtins that
2050 // don't want that done. Skip adaptation code by making it look
2051 // like we have a match between the expected and actual number of arguments.
2053 definitely_matches = true;
2055 *definitely_mismatches = true;
2056 mov(ebx, expected.immediate());
2060 if (actual.is_immediate()) {
2061 // Expected is in register, actual is immediate. This is the
2062 // case when we invoke function values without going through the argument adaptor.
2064 cmp(expected.reg(), actual.immediate());
2066 ASSERT(expected.reg().is(ebx));
2067 mov(eax, actual.immediate());
2068 } else if (!expected.reg().is(actual.reg())) {
2069 // Both expected and actual are in (different) registers. This
2070 // is the case when we invoke functions using call and apply.
2071 cmp(expected.reg(), actual.reg());
2073 ASSERT(actual.reg().is(eax));
2074 ASSERT(expected.reg().is(ebx));
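
  // A mismatch falls through to the ArgumentsAdaptorTrampoline below.
  // Judging from the register assertions above, the adaptor expects the
  // actual argument count in eax, the expected count in ebx and the code
  // to invoke in edx.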
  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and setup the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  ParameterCount expected(function->shared()->formal_parameter_count());
  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // never be resolved to a context slot like this.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}


void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function, FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
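  // For example, assuming kNumSafepointRegisters is 8 on ia32, eax (code 0)
  // is pushed first and so lives in the slot furthest from esp, i.e. at
  // index 7, while the last register pushed ends up at index 0.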
  return kNumSafepointRegisters - reg_code - 1;
}


void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
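    // New-space objects may be moved by the garbage collector, so the
    // generated code does not embed a direct pointer to them. It references
    // a global property cell instead; the GC keeps the cell's value current.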
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    push(Immediate(object));
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    IncrementCounter(counter, value);
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    DecrementCounter(counter, value);
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
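  // With kSmiTag == 0 and kSmiTagMask == 1 on ia32, p0 is simply p1 with the
  // low bit cleared, which makes it look like a valid smi, and p1 - p0 is
  // either 0 or 1, small enough to pass along as a real smi.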
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  Register temp = descriptors;
  mov(temp, FieldOperand(map, Map::kTransitionsOrBackPointerOffset));

  Label ok, fail;
  CheckMap(temp,
           isolate()->factory()->fixed_array_map(),
           &fail,
           DONT_DO_SMI_CHECK);
  mov(descriptors, FieldOperand(temp, TransitionArray::kDescriptorsOffset));
  jmp(&ok);
  bind(&fail);
  mov(descriptors, isolate()->factory()->empty_descriptor_array());
  bind(&ok);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
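  // The two instructions below move the biased exponent into the upper bits
  // of the double: a 64-bit pattern with exponent power + kExponentBias and
  // a zero mantissa is exactly the IEEE 754 encoding of 2^power.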
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kAsciiStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
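  // Because the mask and the mask shifted left by three share no bits (the
  // assert above), the lea below, which adds scratch2 * 8, effectively ORs
  // the two masked instance types into disjoint bit ranges, so a single cmp
  // can test both strings at once.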
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
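  // After the call, esp is restored either from the original esp saved by
  // PrepareCallCFunction (when the OS requires frame alignment) or by simply
  // dropping the argument words.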
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
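  // bitmap_reg now holds the page start (the MemoryChunk header). The rest of
  // the function computes, for the word at addr_reg, the offset of its marking
  // bitmap cell (added to bitmap_reg) and a single-bit mask within that cell
  // (left in mask_reg).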
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}


void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl. May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, FACTORY->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kAsciiStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqAsciiString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqAsciiString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));
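  // Worked example, assuming kSmiTagSize == 1 and kSmiShiftSize == 0 on ia32:
  // for a 3-character ASCII string the length register holds 4 and the smi
  // length is 6, so the imul gives 24 and the shift by 3 yields 3 character
  // bytes, which are then added to the header size and rounded up to the
  // object alignment.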

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}


void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32