// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#if V8_TARGET_ARCH_IA32

#include "bootstrapper.h"
#include "codegen.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "runtime.h"
#include "serialize.h"

namespace v8 {
namespace internal {

// -------------------------------------------------------------------------
// MacroAssembler implementation.

MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      allow_stub_calls_(true),
      has_frame_(false) {
  if (isolate() != NULL) {
    code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
                                  isolate());
  }
}

void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
  if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
    Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
    mov(destination, value);
  } else {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(destination, Immediate(index));
    mov(destination, Operand::StaticArray(destination,
                                          times_pointer_size,
                                          roots_array_start));
  }
}

void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}

void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}

void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}

void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
  cmp(with, value);
}

void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == equal || cc == not_equal);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b.
  ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
  ASSERT(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}

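// The helper below implements the write-barrier fast path through the store
// buffer: the slot address is appended at store_buffer_top, and only when the
// new top has its overflow bit set does the code call out to the
// StoreBufferOverflowStub. In sketch form, the emitted sequence is:
//
//   *store_buffer_top = addr;
//   store_buffer_top += kPointerSize;
//   if (store_buffer_top & kStoreBufferOverflowBit) overflow_stub();
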
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);
    bind(&buffer_overflowed);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);
  }
  StoreBufferOverflowStub store_buffer_overflow =
      StoreBufferOverflowStub(save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    ASSERT(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}

void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  pxor(scratch_reg, scratch_reg);
  cvtsd2si(result_reg, input_reg);
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  cmp(result_reg, Immediate(0x80000000u));
  j(equal, &conv_failure, Label::kNear);
  mov(result_reg, Immediate(0));
  setcc(above, result_reg);
  sub(result_reg, Immediate(1));
  and_(result_reg, Immediate(255));
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  Set(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Set(result_reg, Immediate(255));
  bind(&done);
}

void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}

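// Taken together, the two clamping helpers above compute, in effect (a
// sketch of the semantics, not of the emitted instructions):
//
//   ClampUint8(r):          r = r < 0 ? 0 : r > 255 ? 255 : r;
//   ClampDoubleToUint8(d):  d < 0 or NaN -> 0, d > 255 -> 255,
//                           otherwise the cvtsd2si rounding of d;
//
// branch-free in the common case via setcc and masking.
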
static double kUint32Bias =
    static_cast<double>(static_cast<uint32_t>(0xFFFFFFFF)) + 1;


void MacroAssembler::LoadUint32(XMMRegister dst,
                                Register src,
                                XMMRegister scratch) {
  Label done;
  cmp(src, Immediate(0));
  movdbl(scratch,
         Operand(reinterpret_cast<int32_t>(&kUint32Bias), RelocInfo::NONE32));
  cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);
  addsd(dst, scratch);
  bind(&done);
}

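// LoadUint32 relies on cvtsi2sd being a *signed* conversion: an input with
// the sign bit set is converted as value - 2^32, so when the cmp above sees
// a negative int32 the code adds kUint32Bias (2^32) back in, recovering the
// intended unsigned value.
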
void MacroAssembler::RecordWriteArray(Register object,
                                      Register value,
                                      Register index,
                                      SaveFPRegsMode save_fp,
                                      RememberedSetAction remembered_set_action,
                                      SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    ASSERT_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(index, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  ASSERT(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(
      object, dst, value, save_fp, remembered_set_action, OMIT_SMI_CHECK);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (!FLAG_incremental_marking) {
    return;
  }

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set. This optimization
  // relies on the fact that maps can never be in new space.
  ASSERT(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  // Delay the initialization of |address| and |value| for the stub until it's
  // known that they will be needed. Up until this point their values are not
  // needed since they are embedded in the operands of instructions that need
  // them.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));
  mov(value, Immediate(map));
  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

void MacroAssembler::RecordWrite(Register object,
                                 Register address,
                                 Register value,
                                 SaveFPRegsMode fp_mode,
                                 RememberedSetAction remembered_set_action,
                                 SmiCheck smi_check) {
  ASSERT(!object.is(value));
  ASSERT(!object.is(address));
  ASSERT(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  CheckPageFlag(value,
                value,  // Used as scratch.
                MemoryChunk::kPointersToHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
  CallStub(&stub);

  bind(&done);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(BitCast<int32_t>(kZapValue)));
    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
  }
}

#ifdef ENABLE_DEBUGGER_SUPPORT
void MacroAssembler::DebugBreak() {
  Set(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
  CEntryStub ces(1);
  call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
#endif

void MacroAssembler::Set(Register dst, const Immediate& x) {
  if (x.is_zero()) {
    xor_(dst, dst);  // Shorter than mov.
  } else {
    mov(dst, x);
  }
}


void MacroAssembler::Set(const Operand& dst, const Immediate& x) {
  mov(dst, x);
}

bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
  static const int kMaxImmediateBits = 17;
  if (!RelocInfo::IsNone(x.rmode_)) return false;
  return !is_intn(x.x_, kMaxImmediateBits);
}

void MacroAssembler::SafeSet(Register dst, const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    Set(dst, Immediate(x.x_ ^ jit_cookie()));
    xor_(dst, jit_cookie());
  } else {
    Set(dst, x);
  }
}

void MacroAssembler::SafePush(const Immediate& x) {
  if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
    push(Immediate(x.x_ ^ jit_cookie()));
    xor_(Operand(esp, 0), Immediate(jit_cookie()));
  } else {
    push(x);
  }
}

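// SafeSet and SafePush keep attacker-chosen 32-bit constants out of the
// instruction stream (a JIT-spraying mitigation): the immediate is emitted
// XOR-ed with the per-process jit_cookie(), and the real value is only
// recreated at run time by the second xor, e.g. for SafePush:
//
//   push(imm ^ cookie);            // no controlled bytes in the code
//   xor(Operand(esp, 0), cookie);  // stack slot now holds imm
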
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}

void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}

void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}

void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}

void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}

void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    bool specialize_for_processor,
    int elements_offset) {
  Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, canonicalize NaN.
  uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
  cmp(FieldOperand(maybe_number, offset),
      Immediate(kNaNOrInfinityLowerBoundUpper32));
  j(greater_equal, &maybe_nan, Label::kNear);

  bind(&not_nan);
  ExternalReference canonical_nan_reference =
      ExternalReference::address_of_canonical_non_hole_nan();
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    fld_d(FieldOperand(maybe_number, HeapNumber::kValueOffset));
    bind(&have_double_value);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  jmp(&done);

  bind(&maybe_nan);
  // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
  // it's an Infinity, and the non-NaN code path applies.
  j(greater, &is_nan, Label::kNear);
  cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
  j(zero, &not_nan);
  bind(&is_nan);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope use_sse2(this, SSE2);
    movdbl(scratch2, Operand::StaticVariable(canonical_nan_reference));
  } else {
    fld_d(Operand::StaticVariable(canonical_nan_reference));
  }
  jmp(&have_double_value, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  if (CpuFeatures::IsSupported(SSE2) && specialize_for_processor) {
    CpuFeatureScope fscope(this, SSE2);
    cvtsi2sd(scratch2, scratch1);
    movdbl(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset),
           scratch2);
  } else {
    push(scratch1);
    fild_s(Operand(esp, 0));
    pop(scratch1);
    fstp_d(FieldOperand(elements, key, times_4,
                        FixedDoubleArray::kHeaderSize - elements_offset));
  }
  bind(&done);
}

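// A note on the maybe_nan path above: a double whose upper 32 bits compare
// at or above kNaNOrInfinityLowerBoundUpper32 has an all-ones exponent, so
// it encodes a NaN or an Infinity. A zero lower word then identifies
// Infinity (stored unchanged via not_nan), while any other fraction is a
// NaN and is replaced with the canonical non-hole NaN, ensuring the hole
// NaN bit pattern can never be stored here by accident.
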
void MacroAssembler::CompareMap(Register obj,
                                Handle<Map> map,
                                Label* early_success) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}

void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  Label success;
  CompareMap(obj, map, &success);
  j(not_equal, fail);
  bind(&success);
}

void MacroAssembler::DispatchMap(Register obj,
                                 Register unused,
                                 Handle<Map> map,
                                 Handle<Code> success,
                                 SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
  j(equal, success);

  bind(&fail);
}

Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;
}

Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}

void MacroAssembler::IsObjectJSObjectType(Register heap_object,
                                          Register map,
                                          Register scratch,
                                          Label* fail) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  IsInstanceJSObjectType(map, scratch, fail);
}

void MacroAssembler::IsInstanceJSObjectType(Register map,
                                            Register scratch,
                                            Label* fail) {
  movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
  sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
  cmp(scratch,
      LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
  j(above, fail);
}

void MacroAssembler::FCmp() {
  if (CpuFeatures::IsSupported(CMOV)) {
    fucomip();
    fstp(0);
  } else {
    fucompp();
    push(eax);
    fnstsw_ax();
    sahf();
    pop(eax);
  }
}

void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}

void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}

void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}

void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}

void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}

void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}

void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  leave();
}

void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  ExternalReference context_address(Isolate::kContextAddress,
                                    isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
}

void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    int space = XMMRegister::kNumRegisters * kDoubleSize + argc * kPointerSize;
    sub(esp, Immediate(space));
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    ASSERT(IsPowerOf2(kFrameAlignment));
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}

void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}

void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}

void MacroAssembler::LeaveExitFrame(bool save_doubles) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    CpuFeatureScope scope(this, SSE2);
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movdbl(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  // Get the return address from the stack and restore the frame pointer.
  mov(ecx, Operand(ebp, 1 * kPointerSize));
  mov(ebp, Operand(ebp, 0 * kPointerSize));

  // Pop the arguments and the receiver from the caller stack.
  lea(esp, Operand(esi, 1 * kPointerSize));

  // Push the return address to get ready to return.
  push(ecx);

  LeaveExitFrameEpilogue();
}

void MacroAssembler::LeaveExitFrameEpilogue() {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  mov(esi, Operand::StaticVariable(context_address));
#ifdef DEBUG
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}

void MacroAssembler::LeaveApiExitFrame() {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue();
}

void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
                                    int handler_index) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // We will build up the handler from the bottom by pushing on the stack.
  // First push the frame pointer and context.
  if (kind == StackHandler::JS_ENTRY) {
    // The frame pointer does not point to a JS frame so we save NULL for
    // ebp. We expect the code throwing an exception to check ebp before
    // dereferencing it to restore the context.
    push(Immediate(0));  // NULL frame pointer.
    push(Immediate(Smi::FromInt(0)));  // No context.
  } else {
    push(ebp);
    push(esi);
  }
  // Push the state and the code object.
  unsigned state =
      StackHandler::IndexField::encode(handler_index) |
      StackHandler::KindField::encode(kind);
  push(Immediate(state));
  Push(CodeObject());

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));
  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}

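// The handler built above matches the layout asserted at the top of the
// function (esp points at the newest field):
//
//   esp[0]: next handler           (kNextOffset)
//   esp[1]: code object            (kCodeOffset)
//   esp[2]: state = index | kind   (kStateOffset)
//   esp[3]: context                (kContextOffset)
//   esp[4]: frame pointer          (kFPOffset)
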
void MacroAssembler::PopTryHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}

void MacroAssembler::JumpToHandlerEntry() {
  // Compute the handler entry address and jump to it. The handler table is
  // a fixed array of (smi-tagged) code offsets.
  // eax = exception, edi = code object, edx = state.
  mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
  shr(edx, StackHandler::kKindWidth);
  mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
  SmiUntag(edx);
  lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
  jmp(edi);
}

void MacroAssembler::Throw(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));
  // Restore the next handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Restore the context and frame pointer.
  pop(esi);  // Context.
  pop(ebp);  // Frame pointer.

  // If the handler is a JS frame, restore the context to the frame.
  // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
  // ebp or esi.
  Label skip;
  test(esi, esi);
  j(zero, &skip, Label::kNear);
  mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  bind(&skip);

  JumpToHandlerEntry();
}

void MacroAssembler::ThrowUncatchable(Register value) {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);

  // The exception is expected in eax.
  if (!value.is(eax)) {
    mov(eax, value);
  }
  // Drop the stack pointer to the top of the top stack handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  mov(esp, Operand::StaticVariable(handler_address));

  // Unwind the handlers until the top ENTRY handler is found.
  Label fetch_next, check_kind;
  jmp(&check_kind, Label::kNear);
  bind(&fetch_next);
  mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));

  bind(&check_kind);
  STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
  test(Operand(esp, StackHandlerConstants::kStateOffset),
       Immediate(StackHandler::KindField::kMask));
  j(not_zero, &fetch_next);

  // Set the top handler address to next handler past the top ENTRY handler.
  pop(Operand::StaticVariable(handler_address));

  // Remove the code object and state, compute the handler address in edi.
  pop(edi);  // Code object.
  pop(edx);  // Index and state.

  // Clear the context pointer and frame pointer (0 was saved in the handler).
  pop(esi);
  pop(ebp);

  JumpToHandlerEntry();
}

void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  ASSERT(!holder_reg.is(scratch1));
  ASSERT(!holder_reg.is(scratch2));
  ASSERT(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  int offset =
      Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, offset));
  mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}

// Compute the hash code from the untagged key. This must be kept in sync
// with ComputeIntegerHash in utils.h.
//
// Note: r0 will contain hash code
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (Serializer::enabled()) {
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
}

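// For reference, the integer computation performed above (which, per the
// comment before the function, must stay in sync with ComputeIntegerHash in
// utils.h) is:
//
//   hash = key ^ seed;
//   hash = ~hash + (hash << 15);
//   hash = hash ^ (hash >> 12);
//   hash = hash + (hash << 2);
//   hash = hash ^ (hash >> 4);
//   hash = hash * 2057;
//   hash = hash ^ (hash >> 16);
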
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);

  // Generate an unrolled loop that performs a few probes before giving up.
  const int kProbes = 4;
  for (int i = 0; i < kProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    ASSERT(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kProbes - 1)) {
      j(equal, &done);
    } else {
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a normal property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  ASSERT_EQ(NORMAL, 0);
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}

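// Dictionary layout assumed above: each entry is kEntrySize == 3 words
// (key, value, details), which is why the masked index is scaled by three
// (r2 = r2 * 3) and why the value and the details words sit one and two
// pointers past kElementsStartOffset respectively.
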
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    ASSERT(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}

void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}

void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  ASSERT(object_size <= Page::kMaxNonCodeHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  Register top_reg = result_end.is_valid() ? result_end : result;

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }
}

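// All three Allocate overloads share the same bump-pointer fast path; in
// sketch form:
//
//   result = allocation_top;
//   new_top = result + size;               // j(carry) catches wraparound
//   if (new_top > allocation_limit) goto gc_required;
//   allocation_top = new_top;
//   if (flags & TAG_OBJECT) result += kHeapObjectTag;
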
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    ASSERT(element_size >= times_2);
    ASSERT(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}

void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  ASSERT(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  // Align the next allocation. Storing the filler map without checking top is
  // always safe because the limit of the heap is always aligned.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
    ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    ASSERT(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}

void MacroAssembler::UndoAllocationInNewSpace(Register object) {
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Make sure the object has no tag before resetting top.
  and_(object, Immediate(~kHeapObjectTagMask));
#ifdef DEBUG
  cmp(object, Operand::StaticVariable(new_space_allocation_top));
  Check(below, kUndoAllocationOfNonAllocatedMemory);
#endif
  mov(Operand::StaticVariable(new_space_allocation_top), object);
}

void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->heap_number_map()));
}

void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  ASSERT(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateAsciiString(Register result,
                                         Register length,
                                         Register scratch1,
                                         Register scratch2,
                                         Register scratch3,
                                         Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string
  // while observing object alignment.
  ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  ASSERT(kCharSize == 1);
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateAsciiString(Register result,
                                         int length,
                                         Register scratch1,
                                         Register scratch2,
                                         Label* gc_required) {
  ASSERT(length > 0);

  // Allocate ASCII string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->ascii_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}

void MacroAssembler::AllocateTwoByteConsString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the cons string object in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}

void MacroAssembler::AllocateAsciiConsString(Register result,
                                             Register scratch1,
                                             Register scratch2,
                                             Label* gc_required) {
  Label allocate_new_space, install_map;
  AllocationFlags flags = TAG_OBJECT;

  ExternalReference high_promotion_mode = ExternalReference::
      new_space_high_promotion_mode_active_address(isolate());

  test(Operand::StaticVariable(high_promotion_mode), Immediate(1));
  j(zero, &allocate_new_space);

  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE));
  jmp(&install_map);

  bind(&allocate_new_space);
  Allocate(ConsString::kSize,
           result,
           scratch1,
           scratch2,
           gc_required,
           flags);

  bind(&install_map);
  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_ascii_string_map()));
}

void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}

void MacroAssembler::AllocateAsciiSlicedString(Register result,
                                               Register scratch1,
                                               Register scratch2,
                                               Label* gc_required) {
  // Allocate the sliced string object in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_ascii_string_map()));
}

// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag,
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label loop, done, short_string, short_loop;
  // Experimentation shows that the short string loop is faster if length < 10.
  cmp(length, Immediate(10));
  j(less_equal, &short_string);

  ASSERT(source.is(esi));
  ASSERT(destination.is(edi));
  ASSERT(length.is(ecx));

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done);

  bind(&short_string);
  test(length, length);
  j(zero, &done);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}

void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
                                                Register end_offset,
                                                Register filler) {
  Label loop, entry;
  jmp(&entry);
  bind(&loop);
  mov(Operand(start_offset, 0), filler);
  add(start_offset, Immediate(kPointerSize));
  bind(&entry);
  cmp(start_offset, end_offset);
  j(less, &loop);
}

void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  bit_index += kSmiTagSize + kSmiShiftSize;
  ASSERT(IsPowerOf2(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}

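// Example of the index arithmetic above: on ia32 (kSmiTagSize == 1,
// kSmiShiftSize == 0) a requested bit_index of 10 becomes bit 11 of the
// smi-tagged word, i.e. byte_index == 1 and byte_bit_index == 3, so the
// test_b probes bit 3 of the field's second byte.
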
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}

void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  test(result, result);
  j(not_zero, &ok);
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}

void MacroAssembler::TryGetFunctionPrototype(Register function,
                                             Register result,
                                             Register scratch,
                                             Label* miss,
                                             bool miss_on_bound_function) {
  // Check that the receiver isn't a smi.
  JumpIfSmi(function, miss);

  // Check that the function really is a function.
  CmpObjectType(function, JS_FUNCTION_TYPE, result);
  j(not_equal, miss);

  if (miss_on_bound_function) {
    // If a bound function, go to miss label.
    mov(scratch,
        FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
    BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
                   SharedFunctionInfo::kBoundFunction);
    j(not_zero, miss);
  }

  // Make sure that the function has an instance prototype.
  Label non_instance;
  movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
  test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
  j(not_zero, &non_instance);

  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));
  jmp(&done);

  // Non-instance prototype: Fetch prototype from constructor field
  // in initial map.
  bind(&non_instance);
  mov(result, FieldOperand(result, Map::kConstructorOffset));

  // All done.
  bind(&done);
}

void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  ASSERT(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(isolate()), RelocInfo::CODE_TARGET, ast_id);
}

void MacroAssembler::TailCallStub(CodeStub* stub) {
  ASSERT(allow_stub_calls_ ||
         stub->CompilingCallsToThisStubIsGCSafe(isolate()));
  jmp(stub->GetCode(isolate()), RelocInfo::CODE_TARGET);
}

void MacroAssembler::StubReturn(int argc) {
  ASSERT(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}

bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
  if (!has_frame_ && stub->SometimesSetsUpAFrame()) return false;
  return allow_stub_calls_ || stub->CompilingCallsToThisStubIsGCSafe(isolate());
}

void MacroAssembler::IllegalOperation(int num_arguments) {
  if (num_arguments > 0) {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
  mov(eax, Immediate(isolate()->factory()->undefined_value()));
}

void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  // We want the smi-tagged index in key. kArrayIndexValueMask has zeros in
  // the low kHashShift bits.
  and_(hash, String::kArrayIndexValueMask);
  STATIC_ASSERT(String::kHashShift >= kSmiTagSize && kSmiTag == 0);
  if (String::kHashShift > kSmiTagSize) {
    shr(hash, String::kHashShift - kSmiTagSize);
  }
  if (!index.is(hash)) {
    mov(index, hash);
  }
}

void MacroAssembler::CallRuntime(Runtime::FunctionId id, int num_arguments) {
  CallRuntime(Runtime::FunctionForId(id), num_arguments);
}

void MacroAssembler::CallRuntimeSaveDoubles(Runtime::FunctionId id) {
  const Runtime::Function* function = Runtime::FunctionForId(id);
  Set(eax, Immediate(function->nargs));
  mov(ebx, Immediate(ExternalReference(function, isolate())));
  CEntryStub ces(1, CpuFeatures::IsSupported(SSE2) ? kSaveFPRegs
                                                   : kDontSaveFPRegs);
  CallStub(&ces);
}

void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  if (f->nargs >= 0 && f->nargs != num_arguments) {
    IllegalOperation(num_arguments);
    return;
  }

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(1);
  CallStub(&ces);
}

void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(1);
  CallStub(&stub);
}

void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
                                               int num_arguments,
                                               int result_size) {
  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Set(eax, Immediate(num_arguments));
  JumpToExternalReference(ext);
}

void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
                                     int num_arguments,
                                     int result_size) {
  TailCallExternalReference(ExternalReference(fid, isolate()),
                            num_arguments,
                            result_size);
}

// If true, a Handle<T> returned by value from a function with cdecl calling
// convention will be returned directly as the value of the location_ field
// in register eax.
// If false, it is returned as a pointer to a memory region preallocated by
// the caller. A pointer to this region should be passed to the function as
// an implicit first argument.
#if defined(USING_BSD_ABI) || defined(__MINGW32__) || defined(__CYGWIN__)
static const bool kReturnHandlesDirectly = true;
#else
static const bool kReturnHandlesDirectly = false;
#endif

Operand ApiParameterOperand(int index, bool returns_handle) {
  int offset = (index + (kReturnHandlesDirectly || !returns_handle ? 0 : 1));
  return Operand(esp, offset * kPointerSize);
}

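// Example: for a call prepared with returns_handle == true on a platform
// where kReturnHandlesDirectly is false, ApiParameterOperand(0, true)
// resolves to Operand(esp, 1 * kPointerSize), skipping the hidden slot 0
// that holds the pointer to the output slot (see PrepareCallApiFunction
// below).
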
void MacroAssembler::PrepareCallApiFunction(int argc, bool returns_handle) {
  if (kReturnHandlesDirectly || !returns_handle) {
    EnterApiExitFrame(argc);
    // When handles are returned directly we don't have to allocate extra
    // space for and pass an out parameter.
    if (emit_debug_code()) {
      mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
    }
  } else {
    // We allocate two additional slots: return value and pointer to it.
    EnterApiExitFrame(argc + 2);

    // The argument slots are filled as follows:
    //
    //   n + 1: output slot
    //   n: arg n
    //   ...
    //   1: arg1
    //   0: pointer to the output slot

    lea(esi, Operand(esp, (argc + 1) * kPointerSize));
    mov(Operand(esp, 0 * kPointerSize), esi);
    if (emit_debug_code()) {
      mov(Operand(esi, 0), Immediate(0));
    }
  }
}

2017 void MacroAssembler::CallApiFunctionAndReturn(Address function_address,
2018 Address thunk_address,
2019 Operand thunk_last_arg,
2021 bool returns_handle,
2022 int return_value_offset) {
2023 ExternalReference next_address =
2024 ExternalReference::handle_scope_next_address(isolate());
2025 ExternalReference limit_address =
2026 ExternalReference::handle_scope_limit_address(isolate());
2027 ExternalReference level_address =
2028 ExternalReference::handle_scope_level_address(isolate());
2030 // Allocate HandleScope in callee-save registers.
2031 mov(ebx, Operand::StaticVariable(next_address));
2032 mov(edi, Operand::StaticVariable(limit_address));
2033 add(Operand::StaticVariable(level_address), Immediate(1));
2035 if (FLAG_log_timer_events) {
2036 FrameScope frame(this, StackFrame::MANUAL);
2037 PushSafepointRegisters();
2038 PrepareCallCFunction(1, eax);
2039 mov(Operand(esp, 0),
2040 Immediate(ExternalReference::isolate_address(isolate())));
2041 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2042 PopSafepointRegisters();
  Label profiler_disabled;
  Label end_profiler_check;
  bool* is_profiling_flag =
      isolate()->cpu_profiler()->is_profiling_address();
  STATIC_ASSERT(sizeof(*is_profiling_flag) == 1);
  mov(eax, Immediate(reinterpret_cast<Address>(is_profiling_flag)));
  cmpb(Operand(eax, 0), 0);
  j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  mov(thunk_last_arg, Immediate(function_address));
  // Call the api function.
  call(thunk_address, RelocInfo::RUNTIME_ENTRY);
  jmp(&end_profiler_check);

  bind(&profiler_disabled);
  // Call the api function.
  call(function_address, RelocInfo::RUNTIME_ENTRY);
  bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(this, StackFrame::MANUAL);
    PushSafepointRegisters();
    PrepareCallCFunction(1, eax);
    mov(Operand(esp, 0),
        Immediate(ExternalReference::isolate_address(isolate())));
    CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
    PopSafepointRegisters();
  }

  Label prologue;
  if (returns_handle) {
    if (!kReturnHandlesDirectly) {
      // PrepareCallApiFunction saved a pointer to the output slot in the
      // callee-save register esi.
      mov(eax, Operand(esi, 0));
    }
    Label empty_handle;
    // Check if the result handle holds 0.
    test(eax, eax);
    j(zero, &empty_handle);
    // It was non-zero. Dereference to get the result value.
    mov(eax, Operand(eax, 0));
    jmp(&prologue);
    bind(&empty_handle);
  }
  // Load the value from ReturnValue.
  mov(eax, Operand(ebp, return_value_offset * kPointerSize));

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  mov(Operand::StaticVariable(next_address), ebx);
  sub(Operand::StaticVariable(level_address), Immediate(1));
  Assert(above_equal, kInvalidHandleScopeLevel);
  cmp(edi, Operand::StaticVariable(limit_address));
  j(not_equal, &delete_allocated_handles);
  bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate());
  cmp(Operand::StaticVariable(scheduled_exception_address),
      Immediate(isolate()->factory()->the_hole_value()));
  j(not_equal, &promote_scheduled_exception);

#if ENABLE_EXTRA_CHECKS
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  JumpIfSmi(return_value, &ok, Label::kNear);
  mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  CmpInstanceType(map, FIRST_NONSTRING_TYPE);
  j(below, &ok, Label::kNear);

  CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  j(above_equal, &ok, Label::kNear);

  cmp(map, isolate()->factory()->heap_number_map());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->undefined_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->true_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->false_value());
  j(equal, &ok, Label::kNear);

  cmp(return_value, isolate()->factory()->null_value());
  j(equal, &ok, Label::kNear);

  Abort(kAPICallReturnedInvalidObject);

  bind(&ok);
#endif

  LeaveApiExitFrame();
  ret(stack_space * kPointerSize);

  bind(&promote_scheduled_exception);
  TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate());
  bind(&delete_allocated_handles);
  mov(Operand::StaticVariable(limit_address), edi);
  mov(edi, eax);
  mov(Operand(esp, 0),
      Immediate(ExternalReference::isolate_address(isolate())));
  mov(eax, Immediate(delete_extensions));
  call(eax);
  mov(eax, edi);
  jmp(&leave_exit_frame);
}

void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(1);
  jmp(ces.GetCode(isolate()), RelocInfo::CODE_TARGET);
}

void MacroAssembler::SetCallKind(Register dst, CallKind call_kind) {
  // This macro takes the dst register to make the code more readable
  // at the call sites. However, the dst register has to be ecx to
  // follow the calling convention which requires the call type to be
  // in ecx.
  ASSERT(dst.is(ecx));
  if (call_kind == CALL_AS_FUNCTION) {
    // Set to some non-zero smi by updating the least significant
    // byte.
    mov_b(dst, 1 << kSmiTagSize);
  } else {
    // Set to smi zero by clearing the register.
    xor_(dst, dst);
  }
}

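// Worked example (illustrative): with kSmiTagSize == 1, CALL_AS_FUNCTION
// stores 1 << 1 == 2 in cl, which is the smi encoding of the integer 1,
// while the else branch leaves ecx holding the smi encoding of 0.
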
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Handle<Code> code_constant,
                                    const Operand& code_operand,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    ASSERT(actual.is_immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      mov(eax, actual.immediate());
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaptation code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      ASSERT(expected.reg().is(ebx));
      mov(eax, actual.immediate());
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      ASSERT(actual.reg().is(eax));
      ASSERT(expected.reg().is(ebx));
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (!code_constant.is_null()) {
      mov(edx, Immediate(code_constant));
      add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
    } else if (!code_operand.is_reg(edx)) {
      mov(edx, code_operand);
    }

    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      SetCallKind(ecx, call_kind);
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        jmp(done, done_near);
      }
    } else {
      SetCallKind(ecx, call_kind);
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}

void MacroAssembler::InvokeCode(const Operand& code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, Handle<Code>::null(), code,
                 &done, &definitely_mismatches, flag, Label::kNear,
                 call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      SetCallKind(ecx, call_kind);
      call(code);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeCode(Handle<Code> code,
                                const ParameterCount& expected,
                                const ParameterCount& actual,
                                RelocInfo::Mode rmode,
                                InvokeFlag flag,
                                const CallWrapper& call_wrapper,
                                CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  Label done;
  Operand dummy(eax, 0);
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, code, dummy, &done, &definitely_mismatches,
                 flag, Label::kNear, call_wrapper, call_kind);
  if (!definitely_mismatches) {
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code, rmode));
      SetCallKind(ecx, call_kind);
      call(code, rmode);
      call_wrapper.AfterCall();
    } else {
      ASSERT(flag == JUMP_FUNCTION);
      SetCallKind(ecx, call_kind);
      jmp(code, rmode);
    }
    bind(&done);
  }
}

void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  ASSERT(fun.is(edi));
  mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}

void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper,
                                    CallKind call_kind) {
  // You can't call a function without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Get the function and set up the context.
  LoadHeapObject(edi, function);
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  // We call indirectly through the code field in the function to
  // allow recompilation to take effect without changing any of the
  // call sites.
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, actual, flag, call_wrapper, call_kind);
}

void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
                                   InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  ASSERT(flag == JUMP_FUNCTION || has_frame());

  // Rely on the assertion to check that the number of provided
  // arguments matches the expected number of arguments. Fake a
  // parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, id);
  InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
             expected, expected, flag, call_wrapper, CALL_AS_METHOD);
}

void MacroAssembler::GetBuiltinFunction(Register target,
                                        Builtins::JavaScript id) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
  mov(target, FieldOperand(target,
                           JSBuiltinsObject::OffsetOfFunctionWithId(id)));
}

void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
  ASSERT(!target.is(edi));
  // Load the JavaScript builtin function from the builtins object.
  GetBuiltinFunction(edi, id);
  // Load the code entry point from the function into the target register.
  mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
}

void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context. Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}

void MacroAssembler::LoadTransitionedArrayMapConditional(
    ElementsKind expected_kind,
    ElementsKind transitioned_kind,
    Register map_in_out,
    Register scratch,
    Label* no_map_match) {
  // Load the global or builtins object from the current context.
  mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));

  // Check that the function's map is the same as the expected cached map.
  mov(scratch, Operand(scratch,
                       Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));

  size_t offset = expected_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  cmp(map_in_out, FieldOperand(scratch, offset));
  j(not_equal, no_map_match);

  // Use the transitioned cached map.
  offset = transitioned_kind * kPointerSize +
      FixedArrayBase::kHeaderSize;
  mov(map_in_out, FieldOperand(scratch, offset));
}

void MacroAssembler::LoadInitialArrayMap(
    Register function_in, Register scratch,
    Register map_out, bool can_have_holes) {
  ASSERT(!function_in.is(map_out));
  Label done;
  mov(map_out, FieldOperand(function_in,
                            JSFunction::kPrototypeOrInitialMapOffset));
  if (!FLAG_smi_only_arrays) {
    ElementsKind kind = can_have_holes ? FAST_HOLEY_ELEMENTS : FAST_ELEMENTS;
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        kind,
                                        map_out,
                                        scratch,
                                        &done);
  } else if (can_have_holes) {
    LoadTransitionedArrayMapConditional(FAST_SMI_ELEMENTS,
                                        FAST_HOLEY_SMI_ELEMENTS,
                                        map_out,
                                        scratch,
                                        &done);
  }
  bind(&done);
}

void MacroAssembler::LoadGlobalContext(Register global_context) {
  // Load the global or builtins object from the current context.
  mov(global_context,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(global_context,
      FieldOperand(global_context, GlobalObject::kNativeContextOffset));
}

void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the global or builtins object from the current context.
  mov(function,
      Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  // Load the native context from the global or builtins object.
  mov(function,
      FieldOperand(function, GlobalObject::kNativeContextOffset));
  // Load the function from the native context.
  mov(function, Operand(function, Context::SlotOffset(index)));
}

void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map. The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}

// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}


void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}


Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}

int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}

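// Worked example (illustrative): with kNumSafepointRegisters == 8, the
// register with encoding 0 is pushed first and so lies deepest in the
// stack, at slot index 8 - 0 - 1 == 7, while the highest encoding ends up
// in slot 0, closest to esp.
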
void MacroAssembler::LoadHeapObject(Register result,
                                    Handle<HeapObject> object) {
  AllowDeferredHandleDereference embedding_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    mov(result, Operand::ForCell(cell));
  } else {
    mov(result, object);
  }
}

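// Note (added, not in the original source): for new-space objects the code
// loads through a Cell rather than embedding the object's address, so the
// embedded pointer stays valid even if the scavenger moves the object; the
// else branch embeds the handle's value directly.
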
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}

void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    push(Immediate(object));
  }
}

void MacroAssembler::Ret() {
  ret(0);
}


void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
  if (is_uint16(bytes_dropped)) {
    ret(bytes_dropped);
  } else {
    pop(scratch);
    add(esp, Immediate(bytes_dropped));
    push(scratch);
    ret(0);
  }
}

void MacroAssembler::VerifyX87StackDepth(uint32_t depth) {
  // Make sure the floating point stack is either empty or has depth items.
  ASSERT(depth <= 7);

  // The top-of-stack (tos) is 7 if there is one item pushed.
  int tos = (8 - depth) % 8;
  const int kTopMask = 0x3800;
  push(eax);
  fnstsw_ax();
  and_(eax, kTopMask);
  shr(eax, 11);
  cmp(eax, Immediate(tos));
  Check(equal, kUnexpectedFPUStackDepthAfterInstruction);
  fnclex();
  pop(eax);
}

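// Worked example (illustrative): pushing one item makes the FPU TOP field
// (status-word bits 11..13, mask 0x3800) equal to 7, and indeed for
// depth == 1 the expected value is (8 - 1) % 8 == 7.
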
void MacroAssembler::Drop(int stack_elements) {
  if (stack_elements > 0) {
    add(esp, Immediate(stack_elements * kPointerSize));
  }
}


void MacroAssembler::Move(Register dst, Register src) {
  if (!dst.is(src)) {
    mov(dst, src);
  }
}

void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
  if (FLAG_native_code_counters && counter->Enabled()) {
    mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
  }
}

void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      inc(operand);
    } else {
      add(operand, Immediate(value));
    }
  }
}


void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Operand operand = Operand::StaticVariable(ExternalReference(counter));
    if (value == 1) {
      dec(operand);
    } else {
      sub(operand, Immediate(value));
    }
  }
}

void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}


void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  ASSERT(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}

void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
  if (emit_debug_code()) Check(cc, reason);
}

void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}

void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // Control will not return here.
  bind(&L);
}

void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    ASSERT(IsPowerOf2(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if the stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}

void MacroAssembler::Abort(BailoutReason reason) {
  // We want to pass the msg string like a smi to avoid GC
  // problems, however msg is not guaranteed to be aligned
  // properly. Instead, we pass an aligned pointer that is
  // a proper v8 smi, but also pass the alignment difference
  // from the real pointer as a smi.
  const char* msg = GetBailoutReason(reason);
  intptr_t p1 = reinterpret_cast<intptr_t>(msg);
  intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
  ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
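  // Worked example (illustrative): on ia32, kSmiTagMask == 1 and
  // kSmiTag == 0, so p0 is p1 rounded down to an even address; the
  // difference p1 - p0 is 0 or 1 and is passed alongside as a smi so the
  // runtime can reconstruct the original, possibly unaligned pointer.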
#ifdef DEBUG
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }
#endif

  push(eax);
  push(Immediate(p0));
  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 2);
  } else {
    CallRuntime(Runtime::kAbort, 2);
  }
  // Control will not return here.
  int3();
}

void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}


void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}

void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  ASSERT(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}

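// Worked example (illustrative): for power == 3 the biased exponent is
// 3 + 1023 == 1026; shifting it left by the 52 mantissa bits builds the
// IEEE-754 double bit pattern of 2^3 == 8.0 with a zero mantissa.
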
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
    Register instance_type,
    Register scratch,
    Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}

void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
                                                         Register object2,
                                                         Register scratch1,
                                                         Register scratch2,
                                                         Label* failure) {
  // Check that both objects are not smis.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat ASCII strings.
  const int kFlatAsciiStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatAsciiStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
  and_(scratch1, kFlatAsciiStringMask);
  and_(scratch2, kFlatAsciiStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
  j(not_equal, failure);
}

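// Worked example (illustrative, using the instance-type constants asserted
// elsewhere in this file, e.g. kOneByteStringTag == 0x04): the mask is
// 0x80 | 0x03 | 0x04 == 0x87 and the tag is 0x04, so the lea computes
// scratch1 + scratch2 * 8, packing the two masked types into disjoint bit
// ranges, and a single cmp against 0x04 | (0x04 << 3) == 0x24 checks both
// strings at once.
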
void MacroAssembler::JumpIfNotUniqueName(Operand operand,
                                         Label* not_unique_name,
                                         Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}

void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make the stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    ASSERT(IsPowerOf2(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}

void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}

void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  ASSERT(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}

bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
  if (r1.is(r2)) return true;
  if (r1.is(r3)) return true;
  if (r1.is(r4)) return true;
  if (r2.is(r3)) return true;
  if (r2.is(r4)) return true;
  if (r3.is(r4)) return true;
  return false;
}

CodePatcher::CodePatcher(byte* address, int size)
    : address_(address),
      size_(size),
      masm_(NULL, address, size + Assembler::kGap) {
  // Create a new macro assembler pointing to the address of the code to patch.
  // The size is adjusted with kGap in order for the assembler to generate
  // size bytes of instructions without failing with buffer size constraints.
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  CPU::FlushICache(address_, size_);

  // Check that the code was patched as expected.
  ASSERT(masm_.pc_ == address_ + size_);
  ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}

void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  ASSERT(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  ASSERT(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}

void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
                                        Register scratch,
                                        Label* if_deprecated) {
  if (map->CanBeDeprecated()) {
    mov(scratch, map);
    mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
    and_(scratch, Immediate(Smi::FromInt(Map::Deprecated::kMask)));
    j(not_zero, if_deprecated);
  }
}

void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1,
           on_black, on_black_near,
           1, 0);  // kBlackBitPattern.
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
}

void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}

void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}

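// Worked example (illustrative): a mark bit covers one pointer (4 bytes)
// and a 32-bit bitmap cell covers 32 pointers, so shift == 5 + 2 - 2 == 5.
// For an object at page offset 0x2a4, the pointer index is 0xa9, giving
// cell 0xa9 >> 5 == 5 (byte offset 20 after cell alignment) and bit
// 0xa9 & 31 == 9, i.e. mask_reg == 1 << 9.
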
void MacroAssembler::EnsureNotWhite(
    Register value,
    Register bitmap_scratch,
    Register mask_scratch,
    Label* value_is_white_and_not_data,
    Label::Distance distance) {
  ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
  GetMarkBits(value, bitmap_scratch, mask_scratch);

  // If the value is black or grey we don't need to do anything.
  ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
  ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
  ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
  ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);

  Label done;

  // Since both black and grey have a 1 in the first position and white does
  // not have a 1 there we only need to check one bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(not_zero, &done, Label::kNear);

  if (emit_debug_code()) {
    // Check for the impossible bit pattern.
    Label ok;
    push(mask_scratch);
    // shl. May overflow, making the check conservative.
    add(mask_scratch, mask_scratch);
    test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
    pop(mask_scratch);
  }

  // Value is white. We check whether it is data that doesn't need scanning.
  // Currently only checks for HeapNumber and non-cons strings.
  Register map = ecx;  // Holds map while checking type.
  Register length = ecx;  // Holds length of object after checking type.
  Label not_heap_number;
  Label is_data_object;

  // Check for heap-number.
  mov(map, FieldOperand(value, HeapObject::kMapOffset));
  cmp(map, isolate()->factory()->heap_number_map());
  j(not_equal, &not_heap_number, Label::kNear);
  mov(length, Immediate(HeapNumber::kSize));
  jmp(&is_data_object, Label::kNear);

3110 bind(¬_heap_number);
3111 // Check for strings.
3112 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
3113 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
3114 // If it's a string and it's not a cons string then it's an object containing
3116 Register instance_type = ecx;
3117 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3118 test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
3119 j(not_zero, value_is_white_and_not_data);
3120 // It's a non-indirect (non-cons and non-slice) string.
3121 // If it's external, the length is just ExternalString::kSize.
3122 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
3124 // External strings are the only ones with the kExternalStringTag bit
3126 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
3127 ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
3128 test_b(instance_type, kExternalStringTag);
3129 j(zero, ¬_external, Label::kNear);
3130 mov(length, Immediate(ExternalString::kSize));
3131 jmp(&is_data_object, Label::kNear);
3133 bind(¬_external);
3134 // Sequential string, either ASCII or UC16.
3135 ASSERT(kOneByteStringTag == 0x04);
3136 and_(length, Immediate(kStringEncodingMask));
3137 xor_(length, Immediate(kStringEncodingMask));
3138 add(length, Immediate(0x04));
3139 // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
3140 // by 2. If we multiply the string length as smi by this, it still
3141 // won't overflow a 32-bit value.
3142 ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3143 ASSERT(SeqOneByteString::kMaxSize <=
3144 static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
3145 imul(length, FieldOperand(value, String::kLengthOffset));
3146 shr(length, 2 + kSmiTagSize + kSmiShiftSize);
3147 add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
3148 and_(length, Immediate(~kObjectAlignmentMask));
  bind(&is_data_object);
  // Value is a data object, and it is white. Mark it black. Since we know
  // that the object is white we can make it black by flipping one bit.
  or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);

  and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
  add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
      length);
  if (emit_debug_code()) {
    mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
    cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
    Check(less_equal, kLiveBytesCountOverflowChunkSize);
  }

  bind(&done);
}

void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Smi::FromInt(Map::EnumLengthBits::kMask)));
}

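// Descriptive note (added): the routine below expects the receiver in eax.
// It walks the prototype chain, bailing out to call_runtime unless every
// map's enum cache is usable and every object has empty elements.
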
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  Label next, start;
  mov(ecx, eax);

  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(Map::kInvalidEnumCache)));
  j(equal, call_runtime);

  jmp(&start);

  bind(&next);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));

  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);

  bind(&start);

  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(not_equal, call_runtime);

  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
}

void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg) {
  Label no_memento_available;

  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, &no_memento_available);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, &no_memento_available);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(Handle<Map>(isolate()->heap()->allocation_memento_map())));
  bind(&no_memento_available);
}

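// Descriptive note (added): callers are expected to branch on the equal
// condition after this test. The flags come either from the final map
// comparison (equal when a memento follows the array) or from one of the
// earlier range checks, which cannot produce "equal" when they branch out.
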
} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_IA32