// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if defined(V8_TARGET_ARCH_IA32)

#include "bootstrapper.h"
#include "codegen.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}


void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
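
// Usage sketch (hypothetical call site, not from this file): the test above
// sets ZF when neither IN_FROM_SPACE nor IN_TO_SPACE is set on the object's
// page, so passing not_equal branches when |obj| is in new-space and equal
// branches when it is not:
//
//   masm->InNewSpace(obj, tmp, not_equal, &is_in_new_space, Label::kNear);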


void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}


void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  pxor(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmp(result_reg, Immediate(0x80000000));
  j(equal, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(above, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}


void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);            // 0 if negative, 255 if positive.
  bind(&done);
}
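
// Worked example for the branchless clamp above: for reg == 300 (0x12C) the
// test against 0xFFFFFF00 leaves a non-zero, non-negative result, so
// setcc(negative) writes 0 and dec_b wraps the low byte to 255. For
// reg == -5 (0xFFFFFFFB) the masked value has the sign bit set, so setcc
// writes 1 and dec_b yields 0. Values already in [0, 255] take the early
// exit and are left untouched.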


static double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;


void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  movdbl(scratch,
         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE32));
  cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}


void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed
  // that the from object's page's interesting flag is also set.  This
  // optimization relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Delay the initialization of |address| and |value| for the stub until it's
  // known that they will be needed. Up until this point their values are not
  // needed since they are embedded in the operands of instructions that need
  // them.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}


#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif


void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}


bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}


void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}


void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}
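
// The XOR dance in SafeSet and SafePush above is a JIT-spraying mitigation:
// a large untrusted immediate never appears verbatim in the code stream.
// For example (cookie value purely illustrative), with jit_cookie() ==
// 0x12345678, SafeSet(eax, Immediate(0x0BADBEEF)) emits a mov of
// 0x0BADBEEF ^ 0x12345678 followed by an xor with the cookie, which leaves
// eax == 0x0BADBEEF at run time.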


void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  // see ROOT_ACCESSOR macro in factory.h
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}


void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}


void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}


void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}


void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}


void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope fscope(this, SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  bind(&done);
}
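
// Note on the NaN check above: in an IEEE double the exponent field lives in
// the upper 32 bits, and kNaNOrInfinityLowerBoundUpper32 (0x7FF00000 in this
// version) is the smallest upper word whose exponent bits are all ones.
// Upper words at or above it indicate NaN or Infinity; the follow-up test of
// the fraction bits (zero for Infinity) separates the two cases.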


void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}


void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success);
  j(not_equal, fail);
  bind(&success);
}


void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}


Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}


Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}


void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}


void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}


void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}


void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, "Operand not a number");
    bind(&ok);
  }
}


void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, "Operand is not a smi");
  }
}


void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, "Operand is a smi and not a string");
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, "Operand is not a string");
  }
}


void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, "Operand is a smi and not a name");
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, "Operand is not a name");
  }
}


void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, "Operand is a smi");
  }
}


void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, "code object not properly patched");
  }
}


void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, "stack frame types must match");
  }
  leave();
}


void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}


void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}


void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}


void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}


void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}


void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}


void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
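
// After the pushes above the handler occupies five words on the stack,
// matching the StackHandlerConstants asserted at the top of the function:
//
//   esp[0]: next handler (previous value of Isolate::kHandlerAddress)
//   esp[1]: code object
//   esp[2]: state (kind and handler index)
//   esp[3]: context (smi zero for JS_ENTRY handlers)
//   esp[4]: frame pointer (NULL for JS_ENTRY handlers)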


void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}


void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it.  The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}


void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}


void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}


void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, "we should not have an empty lexical context");
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, "JSGlobalObject::native_context should be a native context.");
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, "JSGlobalProxy::context() should not be null.");

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, "JSGlobalObject::native_context should be a native context.");
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}


// Compute the hash code from the untagged key.  This must be kept in sync
// with ComputeIntegerHash in utils.h.
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  shl(scratch, 15);
  not_(r0);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}
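
// The sequence above is the same integer hash finalizer as ComputeIntegerHash
// in utils.h; in C it reads (all operations on uint32_t):
//
//   hash = hash ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);   // the lea computes hash + hash * 4
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);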


void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
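
// Probing note: the dictionary uses quadratic probing, so probe i inspects
// slot (hash + GetProbeOffset(i)) & mask, each slot being a three-word
// (key, value, details) entry. Unrolling kProbes == 4 iterations inline
// covers the common hit cases; a miss on the last probe jumps to |miss|.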


void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, "Unexpected allocation top");
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}


void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, "Unaligned allocation in new space");
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}


void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}
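
// Tagging note for the code above: when top_reg aliases result, result holds
// the new top (object start + object_size) after the add, so subtracting
// object_size - kHeapObjectTag rewinds to the object start and applies the
// heap-object tag in one instruction. When they don't alias, result already
// holds the start and a single inc applies the tag (kHeapObjectTag == 1).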


void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    ASSERT(element_size >= times_2);
    ASSERT(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}


void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}


void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, "Undo allocation of non allocated memory");
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}


void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}


void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}


void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}


void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  Label allocate_new_space, install_map;
  AllocationFlags flags = TAG_OBJECT;

  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(isolate());

  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
  j(zero, &allocate_new_space);

  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
  jmp(&install_map);

  bind(&allocate_new_space);
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           flags);

  bind(&install_map);
  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}


void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}


void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}


// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
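
// How the long-copy path above stays correct: the last four bytes of the
// range are copied first with an unaligned dword move, then rep_movs copies
// length / 4 dwords from the front. The 0..3 tail bytes that the dword copy
// misses were already written by the initial move, and destination is
// advanced by length & 3 to account for them.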


void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}


void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
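
// Worked example for the smi-tagged bit field above: with kSmiTagSize == 1
// and kSmiShiftSize == 0 (ia32), asking for bit_index 2 really tests bit 3
// of the field word, i.e. bit 3 of byte 0 (byte_index 0, byte_bit_index 3),
// so the test_b touches only a single byte of the object.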


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}


void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}


void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
}


void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ ||
         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
  jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}


void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}


bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
}


void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}


void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key.  kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}


void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}


void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? kSaveFPRegs
                                                   : kDontSaveFPRegs);
  CallStub(&ces);
}


void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}


void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}


void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}


void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}


// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as a value of location_ field in a
// register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller. A pointer to this region should be passed to the function as
// an implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif


Operand ApiParameterOperand(int index, bool returns_handle) {
  int offset = (index + (kReturnHandlesDirectly || !returns_handle ? 0 : 1));
  return Operand(esp, offset * kPointerSize);
}
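
// Example of the offset computation above: in the indirect-return case
// (returns_handle && !kReturnHandlesDirectly) slot 0 holds the pointer to
// the output slot, so ApiParameterOperand(0, true) resolves to
// Operand(esp, 1 * kPointerSize); in the direct-return case it resolves to
// Operand(esp, 0).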


void MacroAssembler::PrepareCallApiFunction(int argc, bool returns_handle) {
  if (kReturnHandlesDirectly || !returns_handle) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output slot
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}
1975 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
1977 bool returns_handle,
1978 int return_value_offset) {
1979 ExternalReference next_address =
1980 ExternalReference::handle_scope_next_address(isolate());
1981 ExternalReference limit_address =
1982 ExternalReference::handle_scope_limit_address(isolate());
1983 ExternalReference level_address =
1984 ExternalReference::handle_scope_level_address(isolate());
1986 // Allocate HandleScope in callee-save registers.
1987 mov(ebx, Operand::StaticVariable(next_address));
1988 mov(edi, Operand::StaticVariable(limit_address));
1989 add(Operand::StaticVariable(level_address), Immediate(1));
1991 if (FLAG_log_timer_events) {
1992 FrameScope frame(this, StackFrame::MANUAL);
1993 PushSafepointRegisters();
1994 PrepareCallCFunction(1, eax);
1995 mov(Operand(esp, 0),
1996 Immediate(ExternalReference::isolate_address(isolate())));
1997 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
1998 PopSafepointRegisters();
2001 // Call the api function.
2002 call(function_address, RelocInfo::RUNTIME_ENTRY);
2004 if (FLAG_log_timer_events) {
2005 FrameScope frame(this, StackFrame::MANUAL);
2006 PushSafepointRegisters();
2007 PrepareCallCFunction(1, eax);
2008 mov(Operand(esp, 0),
2009 Immediate(ExternalReference::isolate_address(isolate())));
2010 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2011 PopSafepointRegisters();
2015 if (returns_handle) {
2016 if (!kReturnHandlesDirectly) {
2017 // PrepareCallApiFunction saved pointer to the output slot into
2018 // callee-save register esi.
2019 mov(eax, Operand(esi, 0));
2022 // Check if the result handle holds 0.
2024 j(zero, &empty_handle);
2025 // It was non-zero. Dereference to get the result value.
2026 mov(eax, Operand(eax, 0));
2028 bind(&empty_handle);
2030 // Load the value from ReturnValue
2031 mov(eax, Operand(ebp, return_value_offset * kPointerSize));
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, "Invalid HandleScope level");
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);
#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort("API call returned invalid object");

  bind(&ok);
#endif
  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  // Preserve the return value in callee-save edi across the C call.
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}


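// Usage sketch (illustrative, not part of the original file): a typical API
// callback stub prepares the exit frame, stores its arguments into the
// parameter slots, and emits the call-and-return sequence. kApiArgc,
// kStackSpace and kReturnValueOffset are hypothetical stub-specific
// constants here, and __ is the usual ACCESS_MASM shorthand:
//
//   __ PrepareCallApiFunction(kApiArgc, returns_handle);
//   __ mov(ApiParameterOperand(0, returns_handle), eax);  // first argument
//   __ CallApiFunctionAndReturn(function_address, kStackSpace,
//                               returns_handle, kReturnValueOffset);
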
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}


void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(dst, 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, dst);
  }
}


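// Illustrative note (not part of the original file): callees receive the
// call kind in ecx as a smi -- zero for CALL_AS_METHOD, non-zero for
// CALL_AS_FUNCTION -- so a hypothetical callee-side dispatch looks like:
//
//   __ test(ecx, ecx);
//   __ j(zero, &called_as_method, Label::kNear);
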
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}


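// Illustrative note (not part of the original file): when both counts are
// known immediates and equal, definitely_matches is set without emitting any
// code, so the InvokeCode wrappers below produce just the call itself. For a
// hypothetical stub calling a two-argument function whose code entry is in
// code_operand (assumed here):
//
//   ParameterCount two(2);
//   __ InvokeCode(code_operand, two, two, CALL_FUNCTION,
//                 NullCallWrapper(), CALL_AS_METHOD);  // no adaptor path
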
void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}


void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


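// Usage sketch (illustrative, not part of the original file): callers place
// the function in edi and the actual argument count in eax or an immediate,
// then invoke:
//
//   ParameterCount actual(eax);
//   __ InvokeFunction(edi, actual, CALL_FUNCTION,
//                     NullCallWrapper(), CALL_AS_METHOD);
//
// The expected count is loaded from the SharedFunctionInfo above, so
// argument adaptation happens automatically whenever the counts differ.
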
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and setup the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}


void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments match the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}


void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}


void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}


void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, "Variable resolved to with context.");
  }
}


void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}


void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}


void MacroAssembler::LoadGlobalContext(Register global_context) {
  // Load the global or builtins object from the current context.
  mov(global_context,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(global_context,
      FieldOperand(global_context, GlobalObject::kNativeContextOffset));
}


void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function,
      FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}


void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort("Global functions must have initial map");
    bind(&ok);
  }
}


// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}


int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}


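// Illustrative worked example (not part of the original file): with
// kNumSafepointRegisters == 8 on ia32, eax (code 0) is pushed first and
// lands deepest in the frame: index = 8 - 0 - 1 = 7, i.e. Operand(esp, 28);
// edi (code 7) is pushed last and gets index 0, i.e. Operand(esp, 0).
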
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    mov(result, Operand::Cell(cell));
  } else {
    mov(result, object);
  }
}


void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    cmp(reg, Operand::Cell(cell));
  } else {
    cmp(reg, object);
  }
}


void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<JSGlobalPropertyCell> cell =
        isolate()->factory()->NewJSGlobalPropertyCell(object);
    push(Operand::Cell(cell));
  } else {
    push(Immediate(object));
  }
}


void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}


void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Make sure the floating point stack is either empty or has depth items.
  ASSERT(depth <= 7);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;
  push(eax);
  fwait();
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, "Unexpected FPU stack depth after instruction");
  fnclex();
  pop(eax);
}


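// Illustrative worked example (not part of the original file): the x87 TOP
// field occupies bits 11-13 of the FPU status word, which is exactly the
// 0x3800 mask above. With one item pushed, TOP is (8 - 1) % 8 == 7, so after
// masking and shifting right by 11 the register should compare equal to 7.
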
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}


void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}


void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}


void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::Assert(Condition cc, const char* msg) {
  if (emit_debug_code()) Check(cc, msg);
}


void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort("JSObject with fast elements map has slow elements");
    bind(&ok);
  }
}


void MacroAssembler::Check(Condition cc, const char* msg) {
  Label L;
  j(cc, &L);
  Abort(msg);
  // will not return here
  bind(&L);
}


void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}


void MacroAssembler::Abort(const char* msg) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // will not return here
  int3();
}


void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}


void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}


void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}


void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag = ASCII_STRING_TYPE;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}


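// Illustrative note (not part of the original file): after masking, each
// instance type holds only the bits selected by kFlatAsciiStringMask. The
// lea computes scratch1 + scratch2 * 8, and because the mask and the mask
// shifted left by 3 share no bits (the ASSERT above), the addition behaves
// as a bitwise OR, folding both type checks into the single cmp against
// kFlatAsciiStringTag | (kFlatAsciiStringTag << 3).
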
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}


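// Illustrative worked example (not part of the original file): with a
// 16-byte ActivationFrameAlignment and num_arguments == 2, the code reserves
// (2 + 1) * 4 = 12 bytes, rounds esp down to a 16-byte boundary via
// and_(esp, -16), and stashes the original esp in the slot just above the
// argument words so CallCFunction can restore it after the call.
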
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}


void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}


bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}


CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate size
  // bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}


void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}


void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
    j(not_zero, if_deprecated);
  }
}


void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}


void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}


void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}


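// Illustrative worked example (not part of the original file): on ia32 the
// shift is 5 + 2 - 2 == 5 (32 bits per cell, 4 bytes per pointer and per
// cell). For an object at page offset 0x2a0, the word index is
// 0x2a0 >> 2 == 168; the first shr/and pair yields byte offset 20 into the
// bitmap (cell 5, four bytes per cell), and the second yields bit
// 168 % 32 == 8, so mask_reg ends up as 1 << 8.
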
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl. May overflow making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_heap_number);
  // Check for strings.
  ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
  ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
  // If it's a string and it's not a cons string then it's an object containing
  // no GC pointers.
  Register instance_type = ecx;
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
  j(not_zero, value_is_white_and_not_data);
  // It's a non-indirect (non-cons and non-slice) string.
  // If it's external, the length is just ExternalString::kSize.
  // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
  Label not_external;
  // External strings are the only ones with the kExternalStringTag bit
  // set.
  ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
  ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
  test_b(instance_type, kExternalStringTag);
  j(zero, &not_external, Label::kNear);
  mov(length, Immediate(ExternalString::kSize));
  jmp(&is_data_object, Label::kNear);

  bind(&not_external);
  // Sequential string, either ASCII or UC16.
  ASSERT(kOneByteStringTag == 0x04);
  and_(length, Immediate(kStringEncodingMask));
  xor_(length, Immediate(kStringEncodingMask));
  add(length, Immediate(0x04));
  // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
  // by 2. If we multiply the string length as smi by this, it still
  // won't overflow a 32-bit value.
  ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
  ASSERT(SeqOneByteString::kMaxSize <=
         static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
  imul(length, FieldOperand(value, String::kLengthOffset));
  shr(length, 2 + kSmiTagSize + kSmiShiftSize);
  add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
  and_(length, Immediate(~kObjectAlignmentMask));

  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, "Live Bytes Count overflow chunk size");
  }

  bind(&done);
}


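// Illustrative worked example (not part of the original file): for a
// sequential two-byte string of length 3 (stored as the smi 6 with
// kSmiTagSize == 1), the encoding trick above leaves length == 8 (char size
// shifted by 2); the imul and shr then produce 3 * 2 == 6 payload bytes, and
// adding SeqString::kHeaderSize plus kObjectAlignmentMask before masking
// rounds the total up to the next object-alignment boundary.
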
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}


void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}


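// Usage sketch (illustrative, not part of the original file): a for-in stub
// with the receiver in eax can validate the whole prototype chain before
// relying on the enum cache:
//
//   Label call_runtime, use_cache;
//   __ CheckEnumCache(&call_runtime);
//   __ jmp(&use_cache);
//   __ bind(&call_runtime);  // fall back to the runtime property lookup
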
void MacroAssembler::TestJSArrayForAllocationSiteInfo(
    Register receiver_reg,
    Register scratch_reg) {
  Label no_info_available;

  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationSiteInfo::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, &no_info_available);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, &no_info_available);
  cmp(MemOperand(scratch_reg, -AllocationSiteInfo::kSize),
      Immediate(Handle<Map>(isolate()->heap()->allocation_site_info_map())));
  bind(&no_info_available);
}


} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32