1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/cpu-profiler.h"
12 #include "src/debug.h"
13 #include "src/isolate-inl.h"
14 #include "src/runtime.h"
15 #include "src/serialize.h"
20 // -------------------------------------------------------------------------
21 // MacroAssembler implementation.
23 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
24 : Assembler(arg_isolate, buffer, size),
25 generating_stub_(false),
27 if (isolate() != NULL) {
28 // TODO(titzer): should we just use a null handle here instead?
29 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
35 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
36 ASSERT(!r.IsDouble());
39 } else if (r.IsUInteger8()) {
41 } else if (r.IsInteger16()) {
43 } else if (r.IsUInteger16()) {
51 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
52 ASSERT(!r.IsDouble());
53 if (r.IsInteger8() || r.IsUInteger8()) {
55 } else if (r.IsInteger16() || r.IsUInteger16()) {
58 if (r.IsHeapObject()) {
60 } else if (r.IsSmi()) {
68 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
69 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
70 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
71 mov(destination, value);
74 ExternalReference roots_array_start =
75 ExternalReference::roots_array_start(isolate());
76 mov(destination, Immediate(index));
77 mov(destination, Operand::StaticArray(destination,
83 void MacroAssembler::StoreRoot(Register source,
85 Heap::RootListIndex index) {
86 ASSERT(Heap::RootCanBeWrittenAfterInitialization(index));
87 ExternalReference roots_array_start =
88 ExternalReference::roots_array_start(isolate());
89 mov(scratch, Immediate(index));
90 mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
95 void MacroAssembler::CompareRoot(Register with,
97 Heap::RootListIndex index) {
98 ExternalReference roots_array_start =
99 ExternalReference::roots_array_start(isolate());
100 mov(scratch, Immediate(index));
101 cmp(with, Operand::StaticArray(scratch,
107 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
108 ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
109 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
114 void MacroAssembler::CompareRoot(const Operand& with,
115 Heap::RootListIndex index) {
116 ASSERT(isolate()->heap()->RootCanBeTreatedAsConstant(index));
117 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
122 void MacroAssembler::InNewSpace(
126 Label* condition_met,
127 Label::Distance condition_met_distance) {
128 ASSERT(cc == equal || cc == not_equal);
129 if (scratch.is(object)) {
130 and_(scratch, Immediate(~Page::kPageAlignmentMask));
132 mov(scratch, Immediate(~Page::kPageAlignmentMask));
133 and_(scratch, object);
135 // Check that we can use a test_b.
136 ASSERT(MemoryChunk::IN_FROM_SPACE < 8);
137 ASSERT(MemoryChunk::IN_TO_SPACE < 8);
138 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
139 | (1 << MemoryChunk::IN_TO_SPACE);
140 // If non-zero, the page belongs to new-space.
141 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
142 static_cast<uint8_t>(mask));
143 j(cc, condition_met, condition_met_distance);
147 void MacroAssembler::RememberedSetHelper(
148 Register object, // Only used for debug checks.
151 SaveFPRegsMode save_fp,
152 MacroAssembler::RememberedSetFinalAction and_then) {
154 if (emit_debug_code()) {
156 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
160 // Load store buffer top.
161 ExternalReference store_buffer =
162 ExternalReference::store_buffer_top(isolate());
163 mov(scratch, Operand::StaticVariable(store_buffer));
164 // Store pointer to buffer.
165 mov(Operand(scratch, 0), addr);
166 // Increment buffer top.
167 add(scratch, Immediate(kPointerSize));
168 // Write back new top of buffer.
169 mov(Operand::StaticVariable(store_buffer), scratch);
170 // Call stub on end of buffer.
171 // Check for end of buffer.
172 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
173 if (and_then == kReturnAtEnd) {
174 Label buffer_overflowed;
175 j(not_equal, &buffer_overflowed, Label::kNear);
177 bind(&buffer_overflowed);
179 ASSERT(and_then == kFallThroughAtEnd);
180 j(equal, &done, Label::kNear);
182 StoreBufferOverflowStub store_buffer_overflow =
183 StoreBufferOverflowStub(isolate(), save_fp);
184 CallStub(&store_buffer_overflow);
185 if (and_then == kReturnAtEnd) {
188 ASSERT(and_then == kFallThroughAtEnd);
194 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
195 XMMRegister scratch_reg,
196 Register result_reg) {
199 xorps(scratch_reg, scratch_reg);
200 cvtsd2si(result_reg, input_reg);
201 test(result_reg, Immediate(0xFFFFFF00));
202 j(zero, &done, Label::kNear);
203 cmp(result_reg, Immediate(0x1));
204 j(overflow, &conv_failure, Label::kNear);
205 mov(result_reg, Immediate(0));
206 setcc(sign, result_reg);
207 sub(result_reg, Immediate(1));
208 and_(result_reg, Immediate(255));
209 jmp(&done, Label::kNear);
211 Move(result_reg, Immediate(0));
212 ucomisd(input_reg, scratch_reg);
213 j(below, &done, Label::kNear);
214 Move(result_reg, Immediate(255));
219 void MacroAssembler::ClampUint8(Register reg) {
221 test(reg, Immediate(0xFFFFFF00));
222 j(zero, &done, Label::kNear);
223 setcc(negative, reg); // 1 if negative, 0 if positive.
224 dec_b(reg); // 0 if negative, 255 if positive.
229 void MacroAssembler::SlowTruncateToI(Register result_reg,
232 DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
233 call(stub.GetCode(), RelocInfo::CODE_TARGET);
237 void MacroAssembler::TruncateDoubleToI(Register result_reg,
238 XMMRegister input_reg) {
240 cvttsd2si(result_reg, Operand(input_reg));
241 cmp(result_reg, 0x1);
242 j(no_overflow, &done, Label::kNear);
244 sub(esp, Immediate(kDoubleSize));
245 movsd(MemOperand(esp, 0), input_reg);
246 SlowTruncateToI(result_reg, esp, 0);
247 add(esp, Immediate(kDoubleSize));
252 void MacroAssembler::DoubleToI(Register result_reg,
253 XMMRegister input_reg,
255 MinusZeroMode minus_zero_mode,
256 Label* conversion_failed,
257 Label::Distance dst) {
258 ASSERT(!input_reg.is(scratch));
259 cvttsd2si(result_reg, Operand(input_reg));
260 Cvtsi2sd(scratch, Operand(result_reg));
261 ucomisd(scratch, input_reg);
262 j(not_equal, conversion_failed, dst);
263 j(parity_even, conversion_failed, dst); // NaN.
264 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
266 // The integer converted back is equal to the original. We
267 // only have to test if we got -0 as an input.
268 test(result_reg, Operand(result_reg));
269 j(not_zero, &done, Label::kNear);
270 movmskpd(result_reg, input_reg);
271 // Bit 0 contains the sign of the double in input_reg.
272 // If input was positive, we are ok and return 0, otherwise
273 // jump to conversion_failed.
275 j(not_zero, conversion_failed, dst);
281 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
282 Register input_reg) {
283 Label done, slow_case;
285 if (CpuFeatures::IsSupported(SSE3)) {
286 CpuFeatureScope scope(this, SSE3);
288 // Use more powerful conversion when sse3 is available.
289 // Load x87 register with heap number.
290 fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
291 // Get exponent alone and check for too-big exponent.
292 mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
293 and_(result_reg, HeapNumber::kExponentMask);
294 const uint32_t kTooBigExponent =
295 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
296 cmp(Operand(result_reg), Immediate(kTooBigExponent));
297 j(greater_equal, &slow_case, Label::kNear);
299 // Reserve space for 64 bit answer.
300 sub(Operand(esp), Immediate(kDoubleSize));
301 // Do conversion, which cannot fail because we checked the exponent.
302 fisttp_d(Operand(esp, 0));
303 mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
304 add(Operand(esp), Immediate(kDoubleSize));
305 jmp(&done, Label::kNear);
309 if (input_reg.is(result_reg)) {
310 // Input is clobbered. Restore number from fpu stack
311 sub(Operand(esp), Immediate(kDoubleSize));
312 fstp_d(Operand(esp, 0));
313 SlowTruncateToI(result_reg, esp, 0);
314 add(esp, Immediate(kDoubleSize));
317 SlowTruncateToI(result_reg, input_reg);
320 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
321 cvttsd2si(result_reg, Operand(xmm0));
322 cmp(result_reg, 0x1);
323 j(no_overflow, &done, Label::kNear);
324 // Check if the input was 0x8000000 (kMinInt).
325 // If no, then we got an overflow and we deoptimize.
326 ExternalReference min_int = ExternalReference::address_of_min_int();
327 ucomisd(xmm0, Operand::StaticVariable(min_int));
328 j(not_equal, &slow_case, Label::kNear);
329 j(parity_even, &slow_case, Label::kNear); // NaN.
330 jmp(&done, Label::kNear);
334 if (input_reg.is(result_reg)) {
335 // Input is clobbered. Restore number from double scratch.
336 sub(esp, Immediate(kDoubleSize));
337 movsd(MemOperand(esp, 0), xmm0);
338 SlowTruncateToI(result_reg, esp, 0);
339 add(esp, Immediate(kDoubleSize));
341 SlowTruncateToI(result_reg, input_reg);
348 void MacroAssembler::TaggedToI(Register result_reg,
351 MinusZeroMode minus_zero_mode,
352 Label* lost_precision) {
354 ASSERT(!temp.is(xmm0));
356 cmp(FieldOperand(input_reg, HeapObject::kMapOffset),
357 isolate()->factory()->heap_number_map());
358 j(not_equal, lost_precision, Label::kNear);
360 ASSERT(!temp.is(no_xmm_reg));
362 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
363 cvttsd2si(result_reg, Operand(xmm0));
364 Cvtsi2sd(temp, Operand(result_reg));
366 RecordComment("Deferred TaggedToI: lost precision");
367 j(not_equal, lost_precision, Label::kNear);
368 RecordComment("Deferred TaggedToI: NaN");
369 j(parity_even, lost_precision, Label::kNear);
370 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
371 test(result_reg, Operand(result_reg));
372 j(not_zero, &done, Label::kNear);
373 movmskpd(result_reg, xmm0);
375 RecordComment("Deferred TaggedToI: minus zero");
376 j(not_zero, lost_precision, Label::kNear);
382 void MacroAssembler::LoadUint32(XMMRegister dst,
385 cmp(src, Immediate(0));
386 ExternalReference uint32_bias =
387 ExternalReference::address_of_uint32_bias();
389 j(not_sign, &done, Label::kNear);
390 addsd(dst, Operand::StaticVariable(uint32_bias));
395 void MacroAssembler::RecordWriteArray(
399 SaveFPRegsMode save_fp,
400 RememberedSetAction remembered_set_action,
402 PointersToHereCheck pointers_to_here_check_for_value) {
403 // First, check if a write barrier is even needed. The tests below
404 // catch stores of Smis.
407 // Skip barrier if writing a smi.
408 if (smi_check == INLINE_SMI_CHECK) {
409 ASSERT_EQ(0, kSmiTag);
410 test(value, Immediate(kSmiTagMask));
414 // Array access: calculate the destination address in the same manner as
415 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
416 // into an array of words.
417 Register dst = index;
418 lea(dst, Operand(object, index, times_half_pointer_size,
419 FixedArray::kHeaderSize - kHeapObjectTag));
421 RecordWrite(object, dst, value, save_fp, remembered_set_action,
422 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
426 // Clobber clobbered input registers when running with the debug-code flag
427 // turned on to provoke errors.
428 if (emit_debug_code()) {
429 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
430 mov(index, Immediate(BitCast<int32_t>(kZapValue)));
435 void MacroAssembler::RecordWriteField(
440 SaveFPRegsMode save_fp,
441 RememberedSetAction remembered_set_action,
443 PointersToHereCheck pointers_to_here_check_for_value) {
444 // First, check if a write barrier is even needed. The tests below
445 // catch stores of Smis.
448 // Skip barrier if writing a smi.
449 if (smi_check == INLINE_SMI_CHECK) {
450 JumpIfSmi(value, &done, Label::kNear);
453 // Although the object register is tagged, the offset is relative to the start
454 // of the object, so so offset must be a multiple of kPointerSize.
455 ASSERT(IsAligned(offset, kPointerSize));
457 lea(dst, FieldOperand(object, offset));
458 if (emit_debug_code()) {
460 test_b(dst, (1 << kPointerSizeLog2) - 1);
461 j(zero, &ok, Label::kNear);
466 RecordWrite(object, dst, value, save_fp, remembered_set_action,
467 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
471 // Clobber clobbered input registers when running with the debug-code flag
472 // turned on to provoke errors.
473 if (emit_debug_code()) {
474 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
475 mov(dst, Immediate(BitCast<int32_t>(kZapValue)));
480 void MacroAssembler::RecordWriteForMap(
485 SaveFPRegsMode save_fp) {
488 Register address = scratch1;
489 Register value = scratch2;
490 if (emit_debug_code()) {
492 lea(address, FieldOperand(object, HeapObject::kMapOffset));
493 test_b(address, (1 << kPointerSizeLog2) - 1);
494 j(zero, &ok, Label::kNear);
499 ASSERT(!object.is(value));
500 ASSERT(!object.is(address));
501 ASSERT(!value.is(address));
502 AssertNotSmi(object);
504 if (!FLAG_incremental_marking) {
508 // Compute the address.
509 lea(address, FieldOperand(object, HeapObject::kMapOffset));
511 // Count number of write barriers in generated code.
512 isolate()->counters()->write_barriers_static()->Increment();
513 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
515 // A single check of the map's pages interesting flag suffices, since it is
516 // only set during incremental collection, and then it's also guaranteed that
517 // the from object's page's interesting flag is also set. This optimization
518 // relies on the fact that maps can never be in new space.
519 ASSERT(!isolate()->heap()->InNewSpace(*map));
520 CheckPageFlagForMap(map,
521 MemoryChunk::kPointersToHereAreInterestingMask,
526 RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
532 // Clobber clobbered input registers when running with the debug-code flag
533 // turned on to provoke errors.
534 if (emit_debug_code()) {
535 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
536 mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
537 mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
542 void MacroAssembler::RecordWrite(
546 SaveFPRegsMode fp_mode,
547 RememberedSetAction remembered_set_action,
549 PointersToHereCheck pointers_to_here_check_for_value) {
550 ASSERT(!object.is(value));
551 ASSERT(!object.is(address));
552 ASSERT(!value.is(address));
553 AssertNotSmi(object);
555 if (remembered_set_action == OMIT_REMEMBERED_SET &&
556 !FLAG_incremental_marking) {
560 if (emit_debug_code()) {
562 cmp(value, Operand(address, 0));
563 j(equal, &ok, Label::kNear);
568 // Count number of write barriers in generated code.
569 isolate()->counters()->write_barriers_static()->Increment();
570 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
572 // First, check if a write barrier is even needed. The tests below
573 // catch stores of Smis and stores into young gen.
576 if (smi_check == INLINE_SMI_CHECK) {
577 // Skip barrier if writing a smi.
578 JumpIfSmi(value, &done, Label::kNear);
581 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
583 value, // Used as scratch.
584 MemoryChunk::kPointersToHereAreInterestingMask,
589 CheckPageFlag(object,
590 value, // Used as scratch.
591 MemoryChunk::kPointersFromHereAreInterestingMask,
596 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
602 // Clobber clobbered registers when running with the debug-code flag
603 // turned on to provoke errors.
604 if (emit_debug_code()) {
605 mov(address, Immediate(BitCast<int32_t>(kZapValue)));
606 mov(value, Immediate(BitCast<int32_t>(kZapValue)));
611 void MacroAssembler::DebugBreak() {
612 Move(eax, Immediate(0));
613 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
614 CEntryStub ces(isolate(), 1);
615 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
619 void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
625 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
626 static const int kMaxImmediateBits = 17;
627 if (!RelocInfo::IsNone(x.rmode_)) return false;
628 return !is_intn(x.x_, kMaxImmediateBits);
632 void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
633 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
634 Move(dst, Immediate(x.x_ ^ jit_cookie()));
635 xor_(dst, jit_cookie());
642 void MacroAssembler::SafePush(const Immediate& x) {
643 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
644 push(Immediate(x.x_ ^ jit_cookie()));
645 xor_(Operand(esp, 0), Immediate(jit_cookie()));
652 void MacroAssembler::CmpObjectType(Register heap_object,
655 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
656 CmpInstanceType(map, type);
660 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
661 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
662 static_cast<int8_t>(type));
666 void MacroAssembler::CheckFastElements(Register map,
668 Label::Distance distance) {
669 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
670 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
671 STATIC_ASSERT(FAST_ELEMENTS == 2);
672 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
673 cmpb(FieldOperand(map, Map::kBitField2Offset),
674 Map::kMaximumBitField2FastHoleyElementValue);
675 j(above, fail, distance);
679 void MacroAssembler::CheckFastObjectElements(Register map,
681 Label::Distance distance) {
682 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
683 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
684 STATIC_ASSERT(FAST_ELEMENTS == 2);
685 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
686 cmpb(FieldOperand(map, Map::kBitField2Offset),
687 Map::kMaximumBitField2FastHoleySmiElementValue);
688 j(below_equal, fail, distance);
689 cmpb(FieldOperand(map, Map::kBitField2Offset),
690 Map::kMaximumBitField2FastHoleyElementValue);
691 j(above, fail, distance);
695 void MacroAssembler::CheckFastSmiElements(Register map,
697 Label::Distance distance) {
698 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
699 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
700 cmpb(FieldOperand(map, Map::kBitField2Offset),
701 Map::kMaximumBitField2FastHoleySmiElementValue);
702 j(above, fail, distance);
706 void MacroAssembler::StoreNumberToDoubleElements(
707 Register maybe_number,
711 XMMRegister scratch2,
713 int elements_offset) {
714 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
715 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
717 CheckMap(maybe_number,
718 isolate()->factory()->heap_number_map(),
722 // Double value, canonicalize NaN.
723 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
724 cmp(FieldOperand(maybe_number, offset),
725 Immediate(kNaNOrInfinityLowerBoundUpper32));
726 j(greater_equal, &maybe_nan, Label::kNear);
729 ExternalReference canonical_nan_reference =
730 ExternalReference::address_of_canonical_non_hole_nan();
731 movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
732 bind(&have_double_value);
733 movsd(FieldOperand(elements, key, times_4,
734 FixedDoubleArray::kHeaderSize - elements_offset),
739 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
740 // it's an Infinity, and the non-NaN code path applies.
741 j(greater, &is_nan, Label::kNear);
742 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
745 movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
746 jmp(&have_double_value, Label::kNear);
749 // Value is a smi. Convert to a double and store.
750 // Preserve original value.
751 mov(scratch1, maybe_number);
753 Cvtsi2sd(scratch2, scratch1);
754 movsd(FieldOperand(elements, key, times_4,
755 FixedDoubleArray::kHeaderSize - elements_offset),
761 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
762 cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
766 void MacroAssembler::CheckMap(Register obj,
769 SmiCheckType smi_check_type) {
770 if (smi_check_type == DO_SMI_CHECK) {
771 JumpIfSmi(obj, fail);
774 CompareMap(obj, map);
779 void MacroAssembler::DispatchMap(Register obj,
782 Handle<Code> success,
783 SmiCheckType smi_check_type) {
785 if (smi_check_type == DO_SMI_CHECK) {
786 JumpIfSmi(obj, &fail);
788 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
795 Condition MacroAssembler::IsObjectStringType(Register heap_object,
797 Register instance_type) {
798 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
799 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
800 STATIC_ASSERT(kNotStringTag != 0);
801 test(instance_type, Immediate(kIsNotStringMask));
806 Condition MacroAssembler::IsObjectNameType(Register heap_object,
808 Register instance_type) {
809 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
810 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
811 cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
816 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
820 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
821 IsInstanceJSObjectType(map, scratch, fail);
825 void MacroAssembler::IsInstanceJSObjectType(Register map,
828 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
829 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
831 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
836 void MacroAssembler::FCmp() {
842 void MacroAssembler::AssertNumber(Register object) {
843 if (emit_debug_code()) {
845 JumpIfSmi(object, &ok);
846 cmp(FieldOperand(object, HeapObject::kMapOffset),
847 isolate()->factory()->heap_number_map());
848 Check(equal, kOperandNotANumber);
854 void MacroAssembler::AssertSmi(Register object) {
855 if (emit_debug_code()) {
856 test(object, Immediate(kSmiTagMask));
857 Check(equal, kOperandIsNotASmi);
862 void MacroAssembler::AssertString(Register object) {
863 if (emit_debug_code()) {
864 test(object, Immediate(kSmiTagMask));
865 Check(not_equal, kOperandIsASmiAndNotAString);
867 mov(object, FieldOperand(object, HeapObject::kMapOffset));
868 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
870 Check(below, kOperandIsNotAString);
875 void MacroAssembler::AssertName(Register object) {
876 if (emit_debug_code()) {
877 test(object, Immediate(kSmiTagMask));
878 Check(not_equal, kOperandIsASmiAndNotAName);
880 mov(object, FieldOperand(object, HeapObject::kMapOffset));
881 CmpInstanceType(object, LAST_NAME_TYPE);
883 Check(below_equal, kOperandIsNotAName);
888 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
889 if (emit_debug_code()) {
891 AssertNotSmi(object);
892 cmp(object, isolate()->factory()->undefined_value());
893 j(equal, &done_checking);
894 cmp(FieldOperand(object, 0),
895 Immediate(isolate()->factory()->allocation_site_map()));
896 Assert(equal, kExpectedUndefinedOrCell);
897 bind(&done_checking);
902 void MacroAssembler::AssertNotSmi(Register object) {
903 if (emit_debug_code()) {
904 test(object, Immediate(kSmiTagMask));
905 Check(not_equal, kOperandIsASmi);
910 void MacroAssembler::StubPrologue() {
911 push(ebp); // Caller's frame pointer.
913 push(esi); // Callee's context.
914 push(Immediate(Smi::FromInt(StackFrame::STUB)));
918 void MacroAssembler::Prologue(bool code_pre_aging) {
919 PredictableCodeSizeScope predictible_code_size_scope(this,
920 kNoCodeAgeSequenceLength);
921 if (code_pre_aging) {
923 call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
924 RelocInfo::CODE_AGE_SEQUENCE);
925 Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
927 push(ebp); // Caller's frame pointer.
929 push(esi); // Callee's context.
930 push(edi); // Callee's JS function.
935 void MacroAssembler::EnterFrame(StackFrame::Type type) {
939 push(Immediate(Smi::FromInt(type)));
940 push(Immediate(CodeObject()));
941 if (emit_debug_code()) {
942 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
943 Check(not_equal, kCodeObjectNotProperlyPatched);
948 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
949 if (emit_debug_code()) {
950 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
951 Immediate(Smi::FromInt(type)));
952 Check(equal, kStackFrameTypesMustMatch);
958 void MacroAssembler::EnterExitFramePrologue() {
959 // Set up the frame structure on the stack.
960 ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
961 ASSERT(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
962 ASSERT(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
966 // Reserve room for entry stack pointer and push the code object.
967 ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
968 push(Immediate(0)); // Saved entry sp, patched before call.
969 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
971 // Save the frame pointer and the context in top.
972 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
973 ExternalReference context_address(Isolate::kContextAddress, isolate());
974 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
975 mov(Operand::StaticVariable(context_address), esi);
979 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
980 // Optionally save all XMM registers.
982 int space = XMMRegister::kMaxNumRegisters * kSIMD128Size +
984 sub(esp, Immediate(space));
985 const int offset = -2 * kPointerSize;
986 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
987 XMMRegister reg = XMMRegister::from_code(i);
988 movups(Operand(ebp, offset - ((i + 1) * kSIMD128Size)), reg);
991 sub(esp, Immediate(argc * kPointerSize));
994 // Get the required frame alignment for the OS.
995 const int kFrameAlignment = OS::ActivationFrameAlignment();
996 if (kFrameAlignment > 0) {
997 ASSERT(IsPowerOf2(kFrameAlignment));
998 and_(esp, -kFrameAlignment);
1001 // Patch the saved entry sp.
1002 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
1006 void MacroAssembler::EnterExitFrame(bool save_doubles) {
1007 EnterExitFramePrologue();
1009 // Set up argc and argv in callee-saved registers.
1010 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
1012 lea(esi, Operand(ebp, eax, times_4, offset));
1014 // Reserve space for argc, argv and isolate.
1015 EnterExitFrameEpilogue(3, save_doubles);
1019 void MacroAssembler::EnterApiExitFrame(int argc) {
1020 EnterExitFramePrologue();
1021 EnterExitFrameEpilogue(argc, false);
1025 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
1026 // Optionally restore all XMM registers.
1028 const int offset = -2 * kPointerSize;
1029 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
1030 XMMRegister reg = XMMRegister::from_code(i);
1031 movups(reg, Operand(ebp, offset - ((i + 1) * kSIMD128Size)));
1035 // Get the return address from the stack and restore the frame pointer.
1036 mov(ecx, Operand(ebp, 1 * kPointerSize));
1037 mov(ebp, Operand(ebp, 0 * kPointerSize));
1039 // Pop the arguments and the receiver from the caller stack.
1040 lea(esp, Operand(esi, 1 * kPointerSize));
1042 // Push the return address to get ready to return.
1045 LeaveExitFrameEpilogue(true);
1049 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
1050 // Restore current context from top and clear it in debug mode.
1051 ExternalReference context_address(Isolate::kContextAddress, isolate());
1052 if (restore_context) {
1053 mov(esi, Operand::StaticVariable(context_address));
1056 mov(Operand::StaticVariable(context_address), Immediate(0));
1059 // Clear the top frame.
1060 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
1062 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
1066 void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
1070 LeaveExitFrameEpilogue(restore_context);
1074 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
1075 int handler_index) {
1076 // Adjust this code if not the case.
1077 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1078 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1079 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1080 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1081 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1082 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1084 // We will build up the handler from the bottom by pushing on the stack.
1085 // First push the frame pointer and context.
1086 if (kind == StackHandler::JS_ENTRY) {
1087 // The frame pointer does not point to a JS frame so we save NULL for
1088 // ebp. We expect the code throwing an exception to check ebp before
1089 // dereferencing it to restore the context.
1090 push(Immediate(0)); // NULL frame pointer.
1091 push(Immediate(Smi::FromInt(0))); // No context.
1096 // Push the state and the code object.
1098 StackHandler::IndexField::encode(handler_index) |
1099 StackHandler::KindField::encode(kind);
1100 push(Immediate(state));
1103 // Link the current handler as the next handler.
1104 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1105 push(Operand::StaticVariable(handler_address));
1106 // Set this new handler as the current one.
1107 mov(Operand::StaticVariable(handler_address), esp);
1111 void MacroAssembler::PopTryHandler() {
1112 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1113 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1114 pop(Operand::StaticVariable(handler_address));
1115 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
1119 void MacroAssembler::JumpToHandlerEntry() {
1120 // Compute the handler entry address and jump to it. The handler table is
1121 // a fixed array of (smi-tagged) code offsets.
1122 // eax = exception, edi = code object, edx = state.
1123 mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
1124 shr(edx, StackHandler::kKindWidth);
1125 mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
1127 lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
1132 void MacroAssembler::Throw(Register value) {
1133 // Adjust this code if not the case.
1134 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1135 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1136 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1137 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1138 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1139 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1141 // The exception is expected in eax.
1142 if (!value.is(eax)) {
1145 // Drop the stack pointer to the top of the top handler.
1146 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1147 mov(esp, Operand::StaticVariable(handler_address));
1148 // Restore the next handler.
1149 pop(Operand::StaticVariable(handler_address));
1151 // Remove the code object and state, compute the handler address in edi.
1152 pop(edi); // Code object.
1153 pop(edx); // Index and state.
1155 // Restore the context and frame pointer.
1156 pop(esi); // Context.
1157 pop(ebp); // Frame pointer.
1159 // If the handler is a JS frame, restore the context to the frame.
1160 // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
1164 j(zero, &skip, Label::kNear);
1165 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
1168 JumpToHandlerEntry();
1172 void MacroAssembler::ThrowUncatchable(Register value) {
1173 // Adjust this code if not the case.
1174 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1175 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1176 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1177 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1178 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1179 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1181 // The exception is expected in eax.
1182 if (!value.is(eax)) {
1185 // Drop the stack pointer to the top of the top stack handler.
1186 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1187 mov(esp, Operand::StaticVariable(handler_address));
1189 // Unwind the handlers until the top ENTRY handler is found.
1190 Label fetch_next, check_kind;
1191 jmp(&check_kind, Label::kNear);
1193 mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
1196 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
1197 test(Operand(esp, StackHandlerConstants::kStateOffset),
1198 Immediate(StackHandler::KindField::kMask));
1199 j(not_zero, &fetch_next);
1201 // Set the top handler address to next handler past the top ENTRY handler.
1202 pop(Operand::StaticVariable(handler_address));
1204 // Remove the code object and state, compute the handler address in edi.
1205 pop(edi); // Code object.
1206 pop(edx); // Index and state.
1208 // Clear the context pointer and frame pointer (0 was saved in the handler).
1212 JumpToHandlerEntry();
1216 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1220 Label same_contexts;
1222 ASSERT(!holder_reg.is(scratch1));
1223 ASSERT(!holder_reg.is(scratch2));
1224 ASSERT(!scratch1.is(scratch2));
1226 // Load current lexical context from the stack frame.
1227 mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));
1229 // When generating debug code, make sure the lexical context is set.
1230 if (emit_debug_code()) {
1231 cmp(scratch1, Immediate(0));
1232 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
1234 // Load the native context of the current context.
1236 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1237 mov(scratch1, FieldOperand(scratch1, offset));
1238 mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));
1240 // Check the context is a native context.
1241 if (emit_debug_code()) {
1242 // Read the first word and compare to native_context_map.
1243 cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
1244 isolate()->factory()->native_context_map());
1245 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1248 // Check if both contexts are the same.
1249 cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1250 j(equal, &same_contexts);
1252 // Compare security tokens, save holder_reg on the stack so we can use it
1253 // as a temporary register.
1255 // Check that the security token in the calling global object is
1256 // compatible with the security token in the receiving global
1259 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1261 // Check the context is a native context.
1262 if (emit_debug_code()) {
1263 cmp(scratch2, isolate()->factory()->null_value());
1264 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
1266 // Read the first word and compare to native_context_map(),
1267 cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
1268 isolate()->factory()->native_context_map());
1269 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1272 int token_offset = Context::kHeaderSize +
1273 Context::SECURITY_TOKEN_INDEX * kPointerSize;
1274 mov(scratch1, FieldOperand(scratch1, token_offset));
1275 cmp(scratch1, FieldOperand(scratch2, token_offset));
1278 bind(&same_contexts);
1282 // Compute the hash code from the untagged key. This must be kept in sync with
1283 // ComputeIntegerHash in utils.h and KeyedLoadGenericElementStub in
1284 // code-stub-hydrogen.cc
1286 // Note: r0 will contain hash code
1287 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1288 // Xor original key with a seed.
1289 if (serializer_enabled()) {
1290 ExternalReference roots_array_start =
1291 ExternalReference::roots_array_start(isolate());
1292 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1294 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
1298 int32_t seed = isolate()->heap()->HashSeed();
1299 xor_(r0, Immediate(seed));
1302 // hash = ~hash + (hash << 15);
1307 // hash = hash ^ (hash >> 12);
1311 // hash = hash + (hash << 2);
1312 lea(r0, Operand(r0, r0, times_4, 0));
1313 // hash = hash ^ (hash >> 4);
1317 // hash = hash * 2057;
1319 // hash = hash ^ (hash >> 16);
1327 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1336 // elements - holds the slow-case elements of the receiver and is unchanged.
1338 // key - holds the smi key on entry and is unchanged.
1340 // Scratch registers:
1342 // r0 - holds the untagged key on entry and holds the hash once computed.
1344 // r1 - used to hold the capacity mask of the dictionary
1346 // r2 - used for the index into the dictionary.
1348 // result - holds the result on exit if the load succeeds and we fall through.
1352 GetNumberHash(r0, r1);
1354 // Compute capacity mask.
1355 mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1356 shr(r1, kSmiTagSize); // convert smi to int
1359 // Generate an unrolled loop that performs a few probes before giving up.
1360 for (int i = 0; i < kNumberDictionaryProbes; i++) {
1361 // Use r2 for index calculations and keep the hash intact in r0.
1363 // Compute the masked index: (hash + i + i * i) & mask.
1365 add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
1369 // Scale the index by multiplying by the entry size.
1370 ASSERT(SeededNumberDictionary::kEntrySize == 3);
1371 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1373 // Check if the key matches.
1374 cmp(key, FieldOperand(elements,
1377 SeededNumberDictionary::kElementsStartOffset));
1378 if (i != (kNumberDictionaryProbes - 1)) {
1386 // Check that the value is a normal propety.
1387 const int kDetailsOffset =
1388 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1389 ASSERT_EQ(NORMAL, 0);
1390 test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1391 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1394 // Get the value at the masked, scaled index.
1395 const int kValueOffset =
1396 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1397 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
1401 void MacroAssembler::LoadAllocationTopHelper(Register result,
1403 AllocationFlags flags) {
1404 ExternalReference allocation_top =
1405 AllocationUtils::GetAllocationTopReference(isolate(), flags);
1407 // Just return if allocation top is already known.
1408 if ((flags & RESULT_CONTAINS_TOP) != 0) {
1409 // No use of scratch if allocation top is provided.
1410 ASSERT(scratch.is(no_reg));
1412 // Assert that result actually contains top on entry.
1413 cmp(result, Operand::StaticVariable(allocation_top));
1414 Check(equal, kUnexpectedAllocationTop);
1419 // Move address of new object to result. Use scratch register if available.
1420 if (scratch.is(no_reg)) {
1421 mov(result, Operand::StaticVariable(allocation_top));
1423 mov(scratch, Immediate(allocation_top));
1424 mov(result, Operand(scratch, 0));
1429 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1431 AllocationFlags flags) {
1432 if (emit_debug_code()) {
1433 test(result_end, Immediate(kObjectAlignmentMask));
1434 Check(zero, kUnalignedAllocationInNewSpace);
1437 ExternalReference allocation_top =
1438 AllocationUtils::GetAllocationTopReference(isolate(), flags);
1440 // Update new top. Use scratch if available.
1441 if (scratch.is(no_reg)) {
1442 mov(Operand::StaticVariable(allocation_top), result_end);
1444 mov(Operand(scratch, 0), result_end);
1449 void MacroAssembler::Allocate(int object_size,
1451 Register result_end,
1454 AllocationFlags flags) {
1455 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1456 ASSERT(object_size <= Page::kMaxRegularHeapObjectSize);
1457 if (!FLAG_inline_new) {
1458 if (emit_debug_code()) {
1459 // Trash the registers to simulate an allocation failure.
1460 mov(result, Immediate(0x7091));
1461 if (result_end.is_valid()) {
1462 mov(result_end, Immediate(0x7191));
1464 if (scratch.is_valid()) {
1465 mov(scratch, Immediate(0x7291));
1471 ASSERT(!result.is(result_end));
1473 // Load address of new object into result.
1474 LoadAllocationTopHelper(result, scratch, flags);
1476 ExternalReference allocation_limit =
1477 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1479 // Align the next allocation. Storing the filler map without checking top is
1480 // safe in new-space because the limit of the heap is aligned there.
1481 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1482 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1483 ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1485 test(result, Immediate(kDoubleAlignmentMask));
1486 j(zero, &aligned, Label::kNear);
1487 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1488 cmp(result, Operand::StaticVariable(allocation_limit));
1489 j(above_equal, gc_required);
1491 mov(Operand(result, 0),
1492 Immediate(isolate()->factory()->one_pointer_filler_map()));
1493 add(result, Immediate(kDoubleSize / 2));
1497 // Calculate new top and bail out if space is exhausted.
1498 Register top_reg = result_end.is_valid() ? result_end : result;
1499 if (!top_reg.is(result)) {
1500 mov(top_reg, result);
1502 add(top_reg, Immediate(object_size));
1503 j(carry, gc_required);
1504 cmp(top_reg, Operand::StaticVariable(allocation_limit));
1505 j(above, gc_required);
1507 // Update allocation top.
1508 UpdateAllocationTopHelper(top_reg, scratch, flags);
1510 // Tag result if requested.
1511 bool tag_result = (flags & TAG_OBJECT) != 0;
1512 if (top_reg.is(result)) {
1514 sub(result, Immediate(object_size - kHeapObjectTag));
1516 sub(result, Immediate(object_size));
1518 } else if (tag_result) {
1519 ASSERT(kHeapObjectTag == 1);
1525 void MacroAssembler::Allocate(int header_size,
1526 ScaleFactor element_size,
1527 Register element_count,
1528 RegisterValueType element_count_type,
1530 Register result_end,
1533 AllocationFlags flags) {
1534 ASSERT((flags & SIZE_IN_WORDS) == 0);
1535 if (!FLAG_inline_new) {
1536 if (emit_debug_code()) {
1537 // Trash the registers to simulate an allocation failure.
1538 mov(result, Immediate(0x7091));
1539 mov(result_end, Immediate(0x7191));
1540 if (scratch.is_valid()) {
1541 mov(scratch, Immediate(0x7291));
1543 // Register element_count is not modified by the function.
1548 ASSERT(!result.is(result_end));
1550 // Load address of new object into result.
1551 LoadAllocationTopHelper(result, scratch, flags);
1553 ExternalReference allocation_limit =
1554 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1556 // Align the next allocation. Storing the filler map without checking top is
1557 // safe in new-space because the limit of the heap is aligned there.
1558 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1559 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1560 ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1562 test(result, Immediate(kDoubleAlignmentMask));
1563 j(zero, &aligned, Label::kNear);
1564 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1565 cmp(result, Operand::StaticVariable(allocation_limit));
1566 j(above_equal, gc_required);
1568 mov(Operand(result, 0),
1569 Immediate(isolate()->factory()->one_pointer_filler_map()));
1570 add(result, Immediate(kDoubleSize / 2));
1574 // Calculate new top and bail out if space is exhausted.
1575 // We assume that element_count*element_size + header_size does not
1577 if (element_count_type == REGISTER_VALUE_IS_SMI) {
1578 STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
1579 STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
1580 STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
1581 ASSERT(element_size >= times_2);
1582 ASSERT(kSmiTagSize == 1);
1583 element_size = static_cast<ScaleFactor>(element_size - 1);
1585 ASSERT(element_count_type == REGISTER_VALUE_IS_INT32);
1587 lea(result_end, Operand(element_count, element_size, header_size));
1588 add(result_end, result);
1589 j(carry, gc_required);
1590 cmp(result_end, Operand::StaticVariable(allocation_limit));
1591 j(above, gc_required);
1593 if ((flags & TAG_OBJECT) != 0) {
1594 ASSERT(kHeapObjectTag == 1);
1598 // Update allocation top.
1599 UpdateAllocationTopHelper(result_end, scratch, flags);
1603 void MacroAssembler::Allocate(Register object_size,
1605 Register result_end,
1608 AllocationFlags flags) {
1609 ASSERT((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1610 if (!FLAG_inline_new) {
1611 if (emit_debug_code()) {
1612 // Trash the registers to simulate an allocation failure.
1613 mov(result, Immediate(0x7091));
1614 mov(result_end, Immediate(0x7191));
1615 if (scratch.is_valid()) {
1616 mov(scratch, Immediate(0x7291));
1618 // object_size is left unchanged by this function.
1623 ASSERT(!result.is(result_end));
1625 // Load address of new object into result.
1626 LoadAllocationTopHelper(result, scratch, flags);
1628 ExternalReference allocation_limit =
1629 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1631 // Align the next allocation. Storing the filler map without checking top is
1632 // safe in new-space because the limit of the heap is aligned there.
1633 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1634 ASSERT((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1635 ASSERT(kPointerAlignment * 2 == kDoubleAlignment);
1637 test(result, Immediate(kDoubleAlignmentMask));
1638 j(zero, &aligned, Label::kNear);
1639 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1640 cmp(result, Operand::StaticVariable(allocation_limit));
1641 j(above_equal, gc_required);
1643 mov(Operand(result, 0),
1644 Immediate(isolate()->factory()->one_pointer_filler_map()));
1645 add(result, Immediate(kDoubleSize / 2));
1649 // Calculate new top and bail out if space is exhausted.
1650 if (!object_size.is(result_end)) {
1651 mov(result_end, object_size);
1653 add(result_end, result);
1654 j(carry, gc_required);
1655 cmp(result_end, Operand::StaticVariable(allocation_limit));
1656 j(above, gc_required);
1658 // Tag result if requested.
1659 if ((flags & TAG_OBJECT) != 0) {
1660 ASSERT(kHeapObjectTag == 1);
1664 // Update allocation top.
1665 UpdateAllocationTopHelper(result_end, scratch, flags);
1669 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1670 ExternalReference new_space_allocation_top =
1671 ExternalReference::new_space_allocation_top_address(isolate());
1673 // Make sure the object has no tag before resetting top.
1674 and_(object, Immediate(~kHeapObjectTagMask));
1676 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1677 Check(below, kUndoAllocationOfNonAllocatedMemory);
1679 mov(Operand::StaticVariable(new_space_allocation_top), object);
1683 void MacroAssembler::AllocateHeapNumber(Register result,
1686 Label* gc_required) {
1687 // Allocate heap number in new space.
1688 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1692 mov(FieldOperand(result, HeapObject::kMapOffset),
1693 Immediate(isolate()->factory()->heap_number_map()));
1697 #define SIMD128_HEAP_ALLOCATE_FUNCTIONS(V) \
1698 V(Float32x4, float32x4) \
1699 V(Float64x2, float64x2) \
1702 #define DECLARE_SIMD_HEAP_ALLOCATE_FUNCTION(TYPE, type) \
1703 void MacroAssembler::Allocate##TYPE(Register result, \
1704 Register scratch1, \
1705 Register scratch2, \
1706 Label* gc_required) { \
1707 /* Allocate SIMD128 object */ \
1708 Allocate(TYPE::kSize, result, scratch1, no_reg, gc_required, TAG_OBJECT);\
1710 mov(FieldOperand(result, JSObject::kMapOffset), \
1711 Immediate(reinterpret_cast<intptr_t>( \
1712 isolate()->native_context()->type##_function()->initial_map())));\
1713 mov(FieldOperand(result, JSObject::kPropertiesOffset), \
1714 Immediate(isolate()->factory()->empty_fixed_array())); \
1715 mov(FieldOperand(result, JSObject::kElementsOffset), \
1716 Immediate(isolate()->factory()->empty_fixed_array())); \
1717 /* Allocate FixedTypedArray object */ \
1718 Allocate(FixedTypedArrayBase::kDataOffset + k##TYPE##Size, \
1719 scratch1, scratch2, no_reg, gc_required, TAG_OBJECT); \
1721 mov(FieldOperand(scratch1, FixedTypedArrayBase::kMapOffset), \
1722 Immediate(isolate()->factory()->fixed_##type##_array_map())); \
1723 mov(scratch2, Immediate(1)); \
1725 mov(FieldOperand(scratch1, FixedTypedArrayBase::kLengthOffset), \
1727 /* Assign TifxedTypedArray object to SIMD128 object */ \
1728 mov(FieldOperand(result, TYPE::kValueOffset), scratch1); \
1731 SIMD128_HEAP_ALLOCATE_FUNCTIONS(DECLARE_SIMD_HEAP_ALLOCATE_FUNCTION)
1734 void MacroAssembler::AllocateTwoByteString(Register result,
1739 Label* gc_required) {
1740 // Calculate the number of bytes needed for the characters in the string while
1741 // observing object alignment.
1742 ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1743 ASSERT(kShortSize == 2);
1744 // scratch1 = length * 2 + kObjectAlignmentMask.
1745 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1746 and_(scratch1, Immediate(~kObjectAlignmentMask));
1748 // Allocate two byte string in new space.
1749 Allocate(SeqTwoByteString::kHeaderSize,
1752 REGISTER_VALUE_IS_INT32,
1759 // Set the map, length and hash field.
1760 mov(FieldOperand(result, HeapObject::kMapOffset),
1761 Immediate(isolate()->factory()->string_map()));
1762 mov(scratch1, length);
1764 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1765 mov(FieldOperand(result, String::kHashFieldOffset),
1766 Immediate(String::kEmptyHashField));
1770 void MacroAssembler::AllocateAsciiString(Register result,
1775 Label* gc_required) {
1776 // Calculate the number of bytes needed for the characters in the string while
1777 // observing object alignment.
1778 ASSERT((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1779 mov(scratch1, length);
1780 ASSERT(kCharSize == 1);
1781 add(scratch1, Immediate(kObjectAlignmentMask));
1782 and_(scratch1, Immediate(~kObjectAlignmentMask));
1784 // Allocate ASCII string in new space.
1785 Allocate(SeqOneByteString::kHeaderSize,
1788 REGISTER_VALUE_IS_INT32,
1795 // Set the map, length and hash field.
1796 mov(FieldOperand(result, HeapObject::kMapOffset),
1797 Immediate(isolate()->factory()->ascii_string_map()));
1798 mov(scratch1, length);
1800 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1801 mov(FieldOperand(result, String::kHashFieldOffset),
1802 Immediate(String::kEmptyHashField));
1806 void MacroAssembler::AllocateAsciiString(Register result,
1810 Label* gc_required) {
1813 // Allocate ASCII string in new space.
1814 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1815 gc_required, TAG_OBJECT);
1817 // Set the map, length and hash field.
1818 mov(FieldOperand(result, HeapObject::kMapOffset),
1819 Immediate(isolate()->factory()->ascii_string_map()));
1820 mov(FieldOperand(result, String::kLengthOffset),
1821 Immediate(Smi::FromInt(length)));
1822 mov(FieldOperand(result, String::kHashFieldOffset),
1823 Immediate(String::kEmptyHashField));
1827 void MacroAssembler::AllocateTwoByteConsString(Register result,
1830 Label* gc_required) {
1831 // Allocate heap number in new space.
1832 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1835 // Set the map. The other fields are left uninitialized.
1836 mov(FieldOperand(result, HeapObject::kMapOffset),
1837 Immediate(isolate()->factory()->cons_string_map()));
1841 void MacroAssembler::AllocateAsciiConsString(Register result,
1844 Label* gc_required) {
1845 Allocate(ConsString::kSize,
1852 // Set the map. The other fields are left uninitialized.
1853 mov(FieldOperand(result, HeapObject::kMapOffset),
1854 Immediate(isolate()->factory()->cons_ascii_string_map()));
1858 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1861 Label* gc_required) {
1862 // Allocate heap number in new space.
1863 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1866 // Set the map. The other fields are left uninitialized.
1867 mov(FieldOperand(result, HeapObject::kMapOffset),
1868 Immediate(isolate()->factory()->sliced_string_map()));
1872 void MacroAssembler::AllocateAsciiSlicedString(Register result,
1875 Label* gc_required) {
1876 // Allocate heap number in new space.
1877 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1880 // Set the map. The other fields are left uninitialized.
1881 mov(FieldOperand(result, HeapObject::kMapOffset),
1882 Immediate(isolate()->factory()->sliced_ascii_string_map()));
1886 // Copy memory, byte-by-byte, from source to destination. Not optimized for
1887 // long or aligned copies. The contents of scratch and length are destroyed.
1888 // Source and destination are incremented by length.
1889 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1890 // have been tried here already, and this is fastest.
1891 // A simpler loop is faster on small copies, but 30% slower on large ones.
1892 // The cld() instruction must have been emitted, to set the direction flag(),
1893 // before calling this function.
1894 void MacroAssembler::CopyBytes(Register source,
1895 Register destination,
1898 Label short_loop, len4, len8, len12, done, short_string;
1899 ASSERT(source.is(esi));
1900 ASSERT(destination.is(edi));
1901 ASSERT(length.is(ecx));
1902 cmp(length, Immediate(4));
1903 j(below, &short_string, Label::kNear);
1905 // Because source is 4-byte aligned in our uses of this function,
1906 // we keep source aligned for the rep_movs call by copying the odd bytes
1907 // at the end of the ranges.
1908 mov(scratch, Operand(source, length, times_1, -4));
1909 mov(Operand(destination, length, times_1, -4), scratch);
1911 cmp(length, Immediate(8));
1912 j(below_equal, &len4, Label::kNear);
1913 cmp(length, Immediate(12));
1914 j(below_equal, &len8, Label::kNear);
1915 cmp(length, Immediate(16));
1916 j(below_equal, &len12, Label::kNear);
1921 and_(scratch, Immediate(0x3));
1922 add(destination, scratch);
1923 jmp(&done, Label::kNear);
1926 mov(scratch, Operand(source, 8));
1927 mov(Operand(destination, 8), scratch);
1929 mov(scratch, Operand(source, 4));
1930 mov(Operand(destination, 4), scratch);
1932 mov(scratch, Operand(source, 0));
1933 mov(Operand(destination, 0), scratch);
1934 add(destination, length);
1935 jmp(&done, Label::kNear);
1937 bind(&short_string);
1938 test(length, length);
1939 j(zero, &done, Label::kNear);
1942 mov_b(scratch, Operand(source, 0));
1943 mov_b(Operand(destination, 0), scratch);
1947 j(not_zero, &short_loop);
1953 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1954 Register end_offset,
1959 mov(Operand(start_offset, 0), filler);
1960 add(start_offset, Immediate(kPointerSize));
1962 cmp(start_offset, end_offset);
1967 void MacroAssembler::BooleanBitTest(Register object,
1970 bit_index += kSmiTagSize + kSmiShiftSize;
1971 ASSERT(IsPowerOf2(kBitsPerByte));
1972 int byte_index = bit_index / kBitsPerByte;
1973 int byte_bit_index = bit_index & (kBitsPerByte - 1);
1974 test_b(FieldOperand(object, field_offset + byte_index),
1975 static_cast<byte>(1 << byte_bit_index));
1980 void MacroAssembler::NegativeZeroTest(Register result,
1982 Label* then_label) {
1984 test(result, result);
1987 j(sign, then_label);
1992 void MacroAssembler::NegativeZeroTest(Register result,
1996 Label* then_label) {
1998 test(result, result);
2002 j(sign, then_label);
2007 void MacroAssembler::TryGetFunctionPrototype(Register function,
2011 bool miss_on_bound_function) {
2012 // Check that the receiver isn't a smi.
2013 JumpIfSmi(function, miss);
2015 // Check that the function really is a function.
2016 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2019 if (miss_on_bound_function) {
2020 // If a bound function, go to miss label.
2022 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2023 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
2024 SharedFunctionInfo::kBoundFunction);
2028 // Make sure that the function has an instance prototype.
2030 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
2031 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
2032 j(not_zero, &non_instance);
2034 // Get the prototype or initial map from the function.
2036 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2038 // If the prototype or initial map is the hole, don't return it and
2039 // simply miss the cache instead. This will allow us to allocate a
2040 // prototype object on-demand in the runtime system.
2041 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
2044 // If the function does not have an initial map, we're done.
2046 CmpObjectType(result, MAP_TYPE, scratch);
2047 j(not_equal, &done);
2049 // Get the prototype from the initial map.
2050 mov(result, FieldOperand(result, Map::kPrototypeOffset));
2053 // Non-instance prototype: Fetch prototype from constructor field
2055 bind(&non_instance);
2056 mov(result, FieldOperand(result, Map::kConstructorOffset));
2063 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
2064 ASSERT(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
2065 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
2069 void MacroAssembler::TailCallStub(CodeStub* stub) {
2070 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
2074 void MacroAssembler::StubReturn(int argc) {
2075 ASSERT(argc >= 1 && generating_stub());
2076 ret((argc - 1) * kPointerSize);
2080 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
2081 return has_frame_ || !stub->SometimesSetsUpAFrame();
2085 void MacroAssembler::IndexFromHash(Register hash, Register index) {
2086 // The assert checks that the constants for the maximum number of digits
2087 // for an array index cached in the hash field and the number of bits
2088 // reserved for it does not conflict.
2089 ASSERT(TenToThe(String::kMaxCachedArrayIndexLength) <
2090 (1 << String::kArrayIndexValueBits));
2091 if (!index.is(hash)) {
2094 DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
2098 void MacroAssembler::CallRuntime(const Runtime::Function* f,
2100 SaveFPRegsMode save_doubles) {
2101 // If the expected number of arguments of the runtime function is
2102 // constant, we check that the actual number of arguments match the
2104 CHECK(f->nargs < 0 || f->nargs == num_arguments);
2106 // TODO(1236192): Most runtime routines don't need the number of
2107 // arguments passed in because it is constant. At some point we
2108 // should remove this need and make the runtime routine entry code
2110 Move(eax, Immediate(num_arguments));
2111 mov(ebx, Immediate(ExternalReference(f, isolate())));
2112 CEntryStub ces(isolate(), 1, save_doubles);
2117 void MacroAssembler::CallExternalReference(ExternalReference ref,
2118 int num_arguments) {
2119 mov(eax, Immediate(num_arguments));
2120 mov(ebx, Immediate(ref));
2122 CEntryStub stub(isolate(), 1);
2127 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
2130 // TODO(1236192): Most runtime routines don't need the number of
2131 // arguments passed in because it is constant. At some point we
2132 // should remove this need and make the runtime routine entry code
2134 Move(eax, Immediate(num_arguments));
2135 JumpToExternalReference(ext);
2139 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2142 TailCallExternalReference(ExternalReference(fid, isolate()),
2148 Operand ApiParameterOperand(int index) {
2149 return Operand(esp, index * kPointerSize);
2153 void MacroAssembler::PrepareCallApiFunction(int argc) {
2154 EnterApiExitFrame(argc);
2155 if (emit_debug_code()) {
2156 mov(esi, Immediate(BitCast<int32_t>(kZapValue)));
2161 void MacroAssembler::CallApiFunctionAndReturn(
2162 Register function_address,
2163 ExternalReference thunk_ref,
2164 Operand thunk_last_arg,
2166 Operand return_value_operand,
2167 Operand* context_restore_operand) {
2168 ExternalReference next_address =
2169 ExternalReference::handle_scope_next_address(isolate());
2170 ExternalReference limit_address =
2171 ExternalReference::handle_scope_limit_address(isolate());
2172 ExternalReference level_address =
2173 ExternalReference::handle_scope_level_address(isolate());
2175 ASSERT(edx.is(function_address));
2176 // Allocate HandleScope in callee-save registers.
2177 mov(ebx, Operand::StaticVariable(next_address));
2178 mov(edi, Operand::StaticVariable(limit_address));
2179 add(Operand::StaticVariable(level_address), Immediate(1));
2181 if (FLAG_log_timer_events) {
2182 FrameScope frame(this, StackFrame::MANUAL);
2183 PushSafepointRegisters();
2184 PrepareCallCFunction(1, eax);
2185 mov(Operand(esp, 0),
2186 Immediate(ExternalReference::isolate_address(isolate())));
2187 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2188 PopSafepointRegisters();
2192 Label profiler_disabled;
2193 Label end_profiler_check;
2194 mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2195 cmpb(Operand(eax, 0), 0);
2196 j(zero, &profiler_disabled);
2198 // Additional parameter is the address of the actual getter function.
2199 mov(thunk_last_arg, function_address);
2200 // Call the api function.
2201 mov(eax, Immediate(thunk_ref));
2203 jmp(&end_profiler_check);
2205 bind(&profiler_disabled);
2206 // Call the api function.
2207 call(function_address);
2208 bind(&end_profiler_check);
2210 if (FLAG_log_timer_events) {
2211 FrameScope frame(this, StackFrame::MANUAL);
2212 PushSafepointRegisters();
2213 PrepareCallCFunction(1, eax);
2214 mov(Operand(esp, 0),
2215 Immediate(ExternalReference::isolate_address(isolate())));
2216 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2217 PopSafepointRegisters();
2221 // Load the value from ReturnValue
2222 mov(eax, return_value_operand);
2224 Label promote_scheduled_exception;
2225 Label exception_handled;
2226 Label delete_allocated_handles;
2227 Label leave_exit_frame;
2230 // No more valid handles (the result handle was the last one). Restore
2231 // previous handle scope.
2232 mov(Operand::StaticVariable(next_address), ebx);
2233 sub(Operand::StaticVariable(level_address), Immediate(1));
2234 Assert(above_equal, kInvalidHandleScopeLevel);
2235 cmp(edi, Operand::StaticVariable(limit_address));
2236 j(not_equal, &delete_allocated_handles);
2237 bind(&leave_exit_frame);
2239 // Check if the function scheduled an exception.
2240 ExternalReference scheduled_exception_address =
2241 ExternalReference::scheduled_exception_address(isolate());
2242 cmp(Operand::StaticVariable(scheduled_exception_address),
2243 Immediate(isolate()->factory()->the_hole_value()));
2244 j(not_equal, &promote_scheduled_exception);
2245 bind(&exception_handled);
2247 #if ENABLE_EXTRA_CHECKS
2248 // Check if the function returned a valid JavaScript value.
2250 Register return_value = eax;
2253 JumpIfSmi(return_value, &ok, Label::kNear);
2254 mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
2256 CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2257 j(below, &ok, Label::kNear);
2259 CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
2260 j(above_equal, &ok, Label::kNear);
2262 cmp(map, isolate()->factory()->heap_number_map());
2263 j(equal, &ok, Label::kNear);
2265 cmp(return_value, isolate()->factory()->undefined_value());
2266 j(equal, &ok, Label::kNear);
2268 cmp(return_value, isolate()->factory()->true_value());
2269 j(equal, &ok, Label::kNear);
2271 cmp(return_value, isolate()->factory()->false_value());
2272 j(equal, &ok, Label::kNear);
2274 cmp(return_value, isolate()->factory()->null_value());
2275 j(equal, &ok, Label::kNear);
2277 Abort(kAPICallReturnedInvalidObject);
2282 bool restore_context = context_restore_operand != NULL;
2283 if (restore_context) {
2284 mov(esi, *context_restore_operand);
2286 LeaveApiExitFrame(!restore_context);
2287 ret(stack_space * kPointerSize);
2289 bind(&promote_scheduled_exception);
2291 FrameScope frame(this, StackFrame::INTERNAL);
2292 CallRuntime(Runtime::kHiddenPromoteScheduledException, 0);
2294 jmp(&exception_handled);
2296 // HandleScope limit has changed. Delete allocated extensions.
2297 ExternalReference delete_extensions =
2298 ExternalReference::delete_handle_scope_extensions(isolate());
2299 bind(&delete_allocated_handles);
2300 mov(Operand::StaticVariable(limit_address), edi);
2302 mov(Operand(esp, 0),
2303 Immediate(ExternalReference::isolate_address(isolate())));
2304 mov(eax, Immediate(delete_extensions));
2307 jmp(&leave_exit_frame);
2311 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2312 // Set the entry point and jump to the C entry runtime stub.
2313 mov(ebx, Immediate(ext));
2314 CEntryStub ces(isolate(), 1);
2315 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
2319 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2320 const ParameterCount& actual,
2321 Handle<Code> code_constant,
2322 const Operand& code_operand,
2324 bool* definitely_mismatches,
2326 Label::Distance done_near,
2327 const CallWrapper& call_wrapper) {
2328 bool definitely_matches = false;
2329 *definitely_mismatches = false;
2331 if (expected.is_immediate()) {
2332 ASSERT(actual.is_immediate());
2333 if (expected.immediate() == actual.immediate()) {
2334 definitely_matches = true;
2336 mov(eax, actual.immediate());
2337 const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2338 if (expected.immediate() == sentinel) {
2339 // Don't worry about adapting arguments for builtins that
2340 // don't want that done. Skip adaption code by making it look
2341 // like we have a match between expected and actual number of
2343 definitely_matches = true;
2345 *definitely_mismatches = true;
2346 mov(ebx, expected.immediate());
2350 if (actual.is_immediate()) {
2351 // Expected is in register, actual is immediate. This is the
2352 // case when we invoke function values without going through the
2354 cmp(expected.reg(), actual.immediate());
2356 ASSERT(expected.reg().is(ebx));
2357 mov(eax, actual.immediate());
2358 } else if (!expected.reg().is(actual.reg())) {
2359 // Both expected and actual are in (different) registers. This
2360 // is the case when we invoke functions using call and apply.
2361 cmp(expected.reg(), actual.reg());
2363 ASSERT(actual.reg().is(eax));
2364 ASSERT(expected.reg().is(ebx));
2368 if (!definitely_matches) {
2369 Handle<Code> adaptor =
2370 isolate()->builtins()->ArgumentsAdaptorTrampoline();
2371 if (!code_constant.is_null()) {
2372 mov(edx, Immediate(code_constant));
2373 add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2374 } else if (!code_operand.is_reg(edx)) {
2375 mov(edx, code_operand);
2378 if (flag == CALL_FUNCTION) {
2379 call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2380 call(adaptor, RelocInfo::CODE_TARGET);
2381 call_wrapper.AfterCall();
2382 if (!*definitely_mismatches) {
2383 jmp(done, done_near);
2386 jmp(adaptor, RelocInfo::CODE_TARGET);
2393 void MacroAssembler::InvokeCode(const Operand& code,
2394 const ParameterCount& expected,
2395 const ParameterCount& actual,
2397 const CallWrapper& call_wrapper) {
2398 // You can't call a function without a valid frame.
2399 ASSERT(flag == JUMP_FUNCTION || has_frame());
2402 bool definitely_mismatches = false;
2403 InvokePrologue(expected, actual, Handle<Code>::null(), code,
2404 &done, &definitely_mismatches, flag, Label::kNear,
2406 if (!definitely_mismatches) {
2407 if (flag == CALL_FUNCTION) {
2408 call_wrapper.BeforeCall(CallSize(code));
2410 call_wrapper.AfterCall();
2412 ASSERT(flag == JUMP_FUNCTION);
2420 void MacroAssembler::InvokeFunction(Register fun,
2421 const ParameterCount& actual,
2423 const CallWrapper& call_wrapper) {
2424 // You can't call a function without a valid frame.
2425 ASSERT(flag == JUMP_FUNCTION || has_frame());
2427 ASSERT(fun.is(edi));
2428 mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2429 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2430 mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2433 ParameterCount expected(ebx);
2434 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2435 expected, actual, flag, call_wrapper);
2439 void MacroAssembler::InvokeFunction(Register fun,
2440 const ParameterCount& expected,
2441 const ParameterCount& actual,
2443 const CallWrapper& call_wrapper) {
2444 // You can't call a function without a valid frame.
2445 ASSERT(flag == JUMP_FUNCTION || has_frame());
2447 ASSERT(fun.is(edi));
2448 mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2450 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2451 expected, actual, flag, call_wrapper);
2455 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2456 const ParameterCount& expected,
2457 const ParameterCount& actual,
2459 const CallWrapper& call_wrapper) {
2460 LoadHeapObject(edi, function);
2461 InvokeFunction(edi, expected, actual, flag, call_wrapper);
2465 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2467 const CallWrapper& call_wrapper) {
2468 // You can't call a builtin without a valid frame.
2469 ASSERT(flag == JUMP_FUNCTION || has_frame());
2471 // Rely on the assertion to check that the number of provided
2472 // arguments match the expected number of arguments. Fake a
2473 // parameter count to avoid emitting code to do the check.
2474 ParameterCount expected(0);
2475 GetBuiltinFunction(edi, id);
2476 InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2477 expected, expected, flag, call_wrapper);
2481 void MacroAssembler::GetBuiltinFunction(Register target,
2482 Builtins::JavaScript id) {
2483 // Load the JavaScript builtin function from the builtins object.
2484 mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2485 mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2486 mov(target, FieldOperand(target,
2487 JSBuiltinsObject::OffsetOfFunctionWithId(id)));
2491 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2492 ASSERT(!target.is(edi));
2493 // Load the JavaScript builtin function from the builtins object.
2494 GetBuiltinFunction(edi, id);
2495 // Load the code entry point from the function into the target register.
2496 mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
2500 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2501 if (context_chain_length > 0) {
2502 // Move up the chain of contexts to the context containing the slot.
2503 mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2504 for (int i = 1; i < context_chain_length; i++) {
2505 mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2508 // Slot is in the current function context. Move it into the
2509 // destination register in case we store into it (the write barrier
2510 // cannot be allowed to destroy the context in esi).
2514 // We should not have found a with context by walking the context chain
2515 // (i.e., the static scope chain and runtime context chain do not agree).
2516 // A variable occurring in such a scope should have slot type LOOKUP and
2518 if (emit_debug_code()) {
2519 cmp(FieldOperand(dst, HeapObject::kMapOffset),
2520 isolate()->factory()->with_context_map());
2521 Check(not_equal, kVariableResolvedToWithContext);
2526 void MacroAssembler::LoadTransitionedArrayMapConditional(
2527 ElementsKind expected_kind,
2528 ElementsKind transitioned_kind,
2529 Register map_in_out,
2531 Label* no_map_match) {
2532 // Load the global or builtins object from the current context.
2533 mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2534 mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2536 // Check that the function's map is the same as the expected cached map.
2537 mov(scratch, Operand(scratch,
2538 Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
2540 size_t offset = expected_kind * kPointerSize +
2541 FixedArrayBase::kHeaderSize;
2542 cmp(map_in_out, FieldOperand(scratch, offset));
2543 j(not_equal, no_map_match);
2545 // Use the transitioned cached map.
2546 offset = transitioned_kind * kPointerSize +
2547 FixedArrayBase::kHeaderSize;
2548 mov(map_in_out, FieldOperand(scratch, offset));
2552 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2553 // Load the global or builtins object from the current context.
2555 Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2556 // Load the native context from the global or builtins object.
2558 FieldOperand(function, GlobalObject::kNativeContextOffset));
2559 // Load the function from the native context.
2560 mov(function, Operand(function, Context::SlotOffset(index)));
2564 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2566 // Load the initial map. The global functions all have initial maps.
2567 mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2568 if (emit_debug_code()) {
2570 CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2573 Abort(kGlobalFunctionsMustHaveInitialMap);
2579 // Store the value in register src in the safepoint register stack
2580 // slot for register dst.
2581 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2582 mov(SafepointRegisterSlot(dst), src);
2586 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2587 mov(SafepointRegisterSlot(dst), src);
2591 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2592 mov(dst, SafepointRegisterSlot(src));
2596 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2597 return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
2601 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2602 // The registers are pushed starting with the lowest encoding,
2603 // which means that lowest encodings are furthest away from
2604 // the stack pointer.
2605 ASSERT(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2606 return kNumSafepointRegisters - reg_code - 1;
2610 void MacroAssembler::LoadHeapObject(Register result,
2611 Handle<HeapObject> object) {
2612 AllowDeferredHandleDereference embedding_raw_address;
2613 if (isolate()->heap()->InNewSpace(*object)) {
2614 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2615 mov(result, Operand::ForCell(cell));
2617 mov(result, object);
2622 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
2623 AllowDeferredHandleDereference using_raw_address;
2624 if (isolate()->heap()->InNewSpace(*object)) {
2625 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2626 cmp(reg, Operand::ForCell(cell));
2633 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2634 AllowDeferredHandleDereference using_raw_address;
2635 if (isolate()->heap()->InNewSpace(*object)) {
2636 Handle<Cell> cell = isolate()->factory()->NewCell(object);
2637 push(Operand::ForCell(cell));
2644 void MacroAssembler::Ret() {
2649 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2650 if (is_uint16(bytes_dropped)) {
2654 add(esp, Immediate(bytes_dropped));
2661 void MacroAssembler::Drop(int stack_elements) {
2662 if (stack_elements > 0) {
2663 add(esp, Immediate(stack_elements * kPointerSize));
2668 void MacroAssembler::Move(Register dst, Register src) {
2675 void MacroAssembler::Move(Register dst, const Immediate& x) {
2677 xor_(dst, dst); // Shorter than mov of 32-bit immediate 0.
2684 void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
2689 void MacroAssembler::Move(XMMRegister dst, double val) {
2690 // TODO(titzer): recognize double constants with ExternalReferences.
2691 uint64_t int_val = BitCast<uint64_t, double>(val);
2695 int32_t lower = static_cast<int32_t>(int_val);
2696 int32_t upper = static_cast<int32_t>(int_val >> kBitsPerInt);
2697 push(Immediate(upper));
2698 push(Immediate(lower));
2699 movsd(dst, Operand(esp, 0));
2700 add(esp, Immediate(kDoubleSize));
2705 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2706 if (FLAG_native_code_counters && counter->Enabled()) {
2707 mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2712 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2714 if (FLAG_native_code_counters && counter->Enabled()) {
2715 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2719 add(operand, Immediate(value));
2725 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2727 if (FLAG_native_code_counters && counter->Enabled()) {
2728 Operand operand = Operand::StaticVariable(ExternalReference(counter));
2732 sub(operand, Immediate(value));
2738 void MacroAssembler::IncrementCounter(Condition cc,
2739 StatsCounter* counter,
2742 if (FLAG_native_code_counters && counter->Enabled()) {
2744 j(NegateCondition(cc), &skip);
2746 IncrementCounter(counter, value);
2753 void MacroAssembler::DecrementCounter(Condition cc,
2754 StatsCounter* counter,
2757 if (FLAG_native_code_counters && counter->Enabled()) {
2759 j(NegateCondition(cc), &skip);
2761 DecrementCounter(counter, value);
2768 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2769 if (emit_debug_code()) Check(cc, reason);
2773 void MacroAssembler::AssertFastElements(Register elements) {
2774 if (emit_debug_code()) {
2775 Factory* factory = isolate()->factory();
2777 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2778 Immediate(factory->fixed_array_map()));
2780 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2781 Immediate(factory->fixed_double_array_map()));
2783 cmp(FieldOperand(elements, HeapObject::kMapOffset),
2784 Immediate(factory->fixed_cow_array_map()));
2786 Abort(kJSObjectWithFastElementsMapHasSlowElements);
2792 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
2796 // will not return here
2801 void MacroAssembler::CheckStackAlignment() {
2802 int frame_alignment = OS::ActivationFrameAlignment();
2803 int frame_alignment_mask = frame_alignment - 1;
2804 if (frame_alignment > kPointerSize) {
2805 ASSERT(IsPowerOf2(frame_alignment));
2806 Label alignment_as_expected;
2807 test(esp, Immediate(frame_alignment_mask));
2808 j(zero, &alignment_as_expected);
2809 // Abort if stack is not aligned.
2811 bind(&alignment_as_expected);
2816 void MacroAssembler::Abort(BailoutReason reason) {
2818 const char* msg = GetBailoutReason(reason);
2820 RecordComment("Abort message: ");
2824 if (FLAG_trap_on_abort) {
2830 push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
2831 // Disable stub call restrictions to always allow calls to abort.
2833 // We don't actually want to generate a pile of code for this, so just
2834 // claim there is a stack frame, without generating one.
2835 FrameScope scope(this, StackFrame::NONE);
2836 CallRuntime(Runtime::kAbort, 1);
2838 CallRuntime(Runtime::kAbort, 1);
2840 // will not return here
2845 void MacroAssembler::LoadInstanceDescriptors(Register map,
2846 Register descriptors) {
2847 mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
2851 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
2852 mov(dst, FieldOperand(map, Map::kBitField3Offset));
2853 DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
2857 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2860 ASSERT(is_uintn(power + HeapNumber::kExponentBias,
2861 HeapNumber::kExponentBits));
2862 mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2864 psllq(dst, HeapNumber::kMantissaBits);
2868 void MacroAssembler::LookupNumberStringCache(Register object,
2873 // Use of registers. Register result is used as a temporary.
2874 Register number_string_cache = result;
2875 Register mask = scratch1;
2876 Register scratch = scratch2;
2878 // Load the number string cache.
2879 LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2880 // Make the hash mask from the length of the number string cache. It
2881 // contains two elements (number and string) for each cache entry.
2882 mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2883 shr(mask, kSmiTagSize + 1); // Untag length and divide it by two.
2884 sub(mask, Immediate(1)); // Make mask.
2886 // Calculate the entry in the number string cache. The hash value in the
2887 // number string cache for smis is just the smi value, and the hash for
2888 // doubles is the xor of the upper and lower words. See
2889 // Heap::GetNumberStringCache.
2890 Label smi_hash_calculated;
2891 Label load_result_from_cache;
2893 STATIC_ASSERT(kSmiTag == 0);
2894 JumpIfNotSmi(object, ¬_smi, Label::kNear);
2895 mov(scratch, object);
2897 jmp(&smi_hash_calculated, Label::kNear);
2899 cmp(FieldOperand(object, HeapObject::kMapOffset),
2900 isolate()->factory()->heap_number_map());
2901 j(not_equal, not_found);
2902 STATIC_ASSERT(8 == kDoubleSize);
2903 mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2904 xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2905 // Object is heap number and hash is now in scratch. Calculate cache index.
2906 and_(scratch, mask);
2907 Register index = scratch;
2908 Register probe = mask;
2910 FieldOperand(number_string_cache,
2912 times_twice_pointer_size,
2913 FixedArray::kHeaderSize));
2914 JumpIfSmi(probe, not_found);
2915 movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
2916 ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
2917 j(parity_even, not_found); // Bail out if NaN is involved.
2918 j(not_equal, not_found); // The cache did not contain this value.
2919 jmp(&load_result_from_cache, Label::kNear);
2921 bind(&smi_hash_calculated);
2922 // Object is smi and hash is now in scratch. Calculate cache index.
2923 and_(scratch, mask);
2924 // Check if the entry is the smi we are looking for.
2926 FieldOperand(number_string_cache,
2928 times_twice_pointer_size,
2929 FixedArray::kHeaderSize));
2930 j(not_equal, not_found);
2932 // Get the result from the cache.
2933 bind(&load_result_from_cache);
2935 FieldOperand(number_string_cache,
2937 times_twice_pointer_size,
2938 FixedArray::kHeaderSize + kPointerSize));
2939 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
2943 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialAscii(
2944 Register instance_type,
2947 if (!scratch.is(instance_type)) {
2948 mov(scratch, instance_type);
2951 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2952 cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
2953 j(not_equal, failure);
2957 void MacroAssembler::JumpIfNotBothSequentialAsciiStrings(Register object1,
2962 // Check that both objects are not smis.
2963 STATIC_ASSERT(kSmiTag == 0);
2964 mov(scratch1, object1);
2965 and_(scratch1, object2);
2966 JumpIfSmi(scratch1, failure);
2968 // Load instance type for both strings.
2969 mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
2970 mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
2971 movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
2972 movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
2974 // Check that both are flat ASCII strings.
2975 const int kFlatAsciiStringMask =
2976 kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
2977 const int kFlatAsciiStringTag =
2978 kStringTag | kOneByteStringTag | kSeqStringTag;
2979 // Interleave bits from both instance types and compare them in one check.
2980 ASSERT_EQ(0, kFlatAsciiStringMask & (kFlatAsciiStringMask << 3));
2981 and_(scratch1, kFlatAsciiStringMask);
2982 and_(scratch2, kFlatAsciiStringMask);
2983 lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
2984 cmp(scratch1, kFlatAsciiStringTag | (kFlatAsciiStringTag << 3));
2985 j(not_equal, failure);
2989 void MacroAssembler::JumpIfNotUniqueName(Operand operand,
2990 Label* not_unique_name,
2991 Label::Distance distance) {
2992 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2994 test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2996 cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
2997 j(not_equal, not_unique_name, distance);
3003 void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
3006 uint32_t encoding_mask) {
3008 JumpIfNotSmi(string, &is_object, Label::kNear);
3013 mov(value, FieldOperand(string, HeapObject::kMapOffset));
3014 movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));
3016 and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
3017 cmp(value, Immediate(encoding_mask));
3019 Check(equal, kUnexpectedStringType);
3021 // The index is assumed to be untagged coming in, tag it to compare with the
3022 // string length without using a temp register, it is restored at the end of
3025 Check(no_overflow, kIndexIsTooLarge);
3027 cmp(index, FieldOperand(string, String::kLengthOffset));
3028 Check(less, kIndexIsTooLarge);
3030 cmp(index, Immediate(Smi::FromInt(0)));
3031 Check(greater_equal, kIndexIsNegative);
3033 // Restore the index
3038 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
3039 int frame_alignment = OS::ActivationFrameAlignment();
3040 if (frame_alignment != 0) {
3041 // Make stack end at alignment and make room for num_arguments words
3042 // and the original value of esp.
3044 sub(esp, Immediate((num_arguments + 1) * kPointerSize));
3045 ASSERT(IsPowerOf2(frame_alignment));
3046 and_(esp, -frame_alignment);
3047 mov(Operand(esp, num_arguments * kPointerSize), scratch);
3049 sub(esp, Immediate(num_arguments * kPointerSize));
3054 void MacroAssembler::CallCFunction(ExternalReference function,
3055 int num_arguments) {
3056 // Trashing eax is ok as it will be the return value.
3057 mov(eax, Immediate(function));
3058 CallCFunction(eax, num_arguments);
3062 void MacroAssembler::CallCFunction(Register function,
3063 int num_arguments) {
3064 ASSERT(has_frame());
3065 // Check stack alignment.
3066 if (emit_debug_code()) {
3067 CheckStackAlignment();
3071 if (OS::ActivationFrameAlignment() != 0) {
3072 mov(esp, Operand(esp, num_arguments * kPointerSize));
3074 add(esp, Immediate(num_arguments * kPointerSize));
3079 bool AreAliased(Register r1, Register r2, Register r3, Register r4) {
3080 if (r1.is(r2)) return true;
3081 if (r1.is(r3)) return true;
3082 if (r1.is(r4)) return true;
3083 if (r2.is(r3)) return true;
3084 if (r2.is(r4)) return true;
3085 if (r3.is(r4)) return true;
3090 CodePatcher::CodePatcher(byte* address, int size)
3091 : address_(address),
3093 masm_(NULL, address, size + Assembler::kGap) {
3094 // Create a new macro assembler pointing to the address of the code to patch.
3095 // The size is adjusted with kGap on order for the assembler to generate size
3096 // bytes of instructions without failing with buffer size constraints.
3097 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3101 CodePatcher::~CodePatcher() {
3102 // Indicate that code has changed.
3103 CPU::FlushICache(address_, size_);
3105 // Check that the code was patched as expected.
3106 ASSERT(masm_.pc_ == address_ + size_);
3107 ASSERT(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
3111 void MacroAssembler::CheckPageFlag(
3116 Label* condition_met,
3117 Label::Distance condition_met_distance) {
3118 ASSERT(cc == zero || cc == not_zero);
3119 if (scratch.is(object)) {
3120 and_(scratch, Immediate(~Page::kPageAlignmentMask));
3122 mov(scratch, Immediate(~Page::kPageAlignmentMask));
3123 and_(scratch, object);
3125 if (mask < (1 << kBitsPerByte)) {
3126 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3127 static_cast<uint8_t>(mask));
3129 test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3131 j(cc, condition_met, condition_met_distance);
3135 void MacroAssembler::CheckPageFlagForMap(
3139 Label* condition_met,
3140 Label::Distance condition_met_distance) {
3141 ASSERT(cc == zero || cc == not_zero);
3142 Page* page = Page::FromAddress(map->address());
3143 ExternalReference reference(ExternalReference::page_flags(page));
3144 // The inlined static address check of the page's flags relies
3145 // on maps never being compacted.
3146 ASSERT(!isolate()->heap()->mark_compact_collector()->
3147 IsOnEvacuationCandidate(*map));
3148 if (mask < (1 << kBitsPerByte)) {
3149 test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
3151 test(Operand::StaticVariable(reference), Immediate(mask));
3153 j(cc, condition_met, condition_met_distance);
3157 void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
3159 Label* if_deprecated) {
3160 if (map->CanBeDeprecated()) {
3162 mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
3163 and_(scratch, Immediate(Map::Deprecated::kMask));
3164 j(not_zero, if_deprecated);
3169 void MacroAssembler::JumpIfBlack(Register object,
3173 Label::Distance on_black_near) {
3174 HasColor(object, scratch0, scratch1,
3175 on_black, on_black_near,
3176 1, 0); // kBlackBitPattern.
3177 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
3181 void MacroAssembler::HasColor(Register object,
3182 Register bitmap_scratch,
3183 Register mask_scratch,
3185 Label::Distance has_color_distance,
3188 ASSERT(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));
3190 GetMarkBits(object, bitmap_scratch, mask_scratch);
3192 Label other_color, word_boundary;
3193 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3194 j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
3195 add(mask_scratch, mask_scratch); // Shift left 1 by adding.
3196 j(zero, &word_boundary, Label::kNear);
3197 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3198 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
3199 jmp(&other_color, Label::kNear);
3201 bind(&word_boundary);
3202 test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
3204 j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
3209 void MacroAssembler::GetMarkBits(Register addr_reg,
3210 Register bitmap_reg,
3211 Register mask_reg) {
3212 ASSERT(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
3213 mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
3214 and_(bitmap_reg, addr_reg);
3217 Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
3220 (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));
3222 add(bitmap_reg, ecx);
3224 shr(ecx, kPointerSizeLog2);
3225 and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
3226 mov(mask_reg, Immediate(1));
3231 void MacroAssembler::EnsureNotWhite(
3233 Register bitmap_scratch,
3234 Register mask_scratch,
3235 Label* value_is_white_and_not_data,
3236 Label::Distance distance) {
3237 ASSERT(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
3238 GetMarkBits(value, bitmap_scratch, mask_scratch);
3240 // If the value is black or grey we don't need to do anything.
3241 ASSERT(strcmp(Marking::kWhiteBitPattern, "00") == 0);
3242 ASSERT(strcmp(Marking::kBlackBitPattern, "10") == 0);
3243 ASSERT(strcmp(Marking::kGreyBitPattern, "11") == 0);
3244 ASSERT(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
3248 // Since both black and grey have a 1 in the first position and white does
3249 // not have a 1 there we only need to check one bit.
3250 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3251 j(not_zero, &done, Label::kNear);
3253 if (emit_debug_code()) {
3254 // Check for impossible bit pattern.
3257 // shl. May overflow making the check conservative.
3258 add(mask_scratch, mask_scratch);
3259 test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3260 j(zero, &ok, Label::kNear);
3266 // Value is white. We check whether it is data that doesn't need scanning.
3267 // Currently only checks for HeapNumber and non-cons strings.
3268 Register map = ecx; // Holds map while checking type.
3269 Register length = ecx; // Holds length of object after checking type.
3270 Label not_heap_number;
3271 Label is_data_object;
3273 // Check for heap-number
3274 mov(map, FieldOperand(value, HeapObject::kMapOffset));
3275 cmp(map, isolate()->factory()->heap_number_map());
3276 j(not_equal, ¬_heap_number, Label::kNear);
3277 mov(length, Immediate(HeapNumber::kSize));
3278 jmp(&is_data_object, Label::kNear);
3280 bind(¬_heap_number);
3281 // Check for strings.
3282 ASSERT(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
3283 ASSERT(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
3284 // If it's a string and it's not a cons string then it's an object containing
3286 Register instance_type = ecx;
3287 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3288 test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
3289 j(not_zero, value_is_white_and_not_data);
3290 // It's a non-indirect (non-cons and non-slice) string.
3291 // If it's external, the length is just ExternalString::kSize.
3292 // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
3294 // External strings are the only ones with the kExternalStringTag bit
3296 ASSERT_EQ(0, kSeqStringTag & kExternalStringTag);
3297 ASSERT_EQ(0, kConsStringTag & kExternalStringTag);
3298 test_b(instance_type, kExternalStringTag);
3299 j(zero, ¬_external, Label::kNear);
3300 mov(length, Immediate(ExternalString::kSize));
3301 jmp(&is_data_object, Label::kNear);
3303 bind(¬_external);
3304 // Sequential string, either ASCII or UC16.
3305 ASSERT(kOneByteStringTag == 0x04);
3306 and_(length, Immediate(kStringEncodingMask));
3307 xor_(length, Immediate(kStringEncodingMask));
3308 add(length, Immediate(0x04));
3309 // Value now either 4 (if ASCII) or 8 (if UC16), i.e., char-size shifted
3310 // by 2. If we multiply the string length as smi by this, it still
3311 // won't overflow a 32-bit value.
3312 ASSERT_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3313 ASSERT(SeqOneByteString::kMaxSize <=
3314 static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
3315 imul(length, FieldOperand(value, String::kLengthOffset));
3316 shr(length, 2 + kSmiTagSize + kSmiShiftSize);
3317 add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
3318 and_(length, Immediate(~kObjectAlignmentMask));
3320 bind(&is_data_object);
3321 // Value is a data object, and it is white. Mark it black. Since we know
3322 // that the object is white we can make it black by flipping one bit.
3323 or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
3325 and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
3326 add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
3328 if (emit_debug_code()) {
3329 mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
3330 cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
3331 Check(less_equal, kLiveBytesCountOverflowChunkSize);
3338 void MacroAssembler::EnumLength(Register dst, Register map) {
3339 STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
3340 mov(dst, FieldOperand(map, Map::kBitField3Offset));
3341 and_(dst, Immediate(Map::EnumLengthBits::kMask));
3346 void MacroAssembler::CheckEnumCache(Label* call_runtime) {
3350 // Check if the enum length field is properly initialized, indicating that
3351 // there is an enum cache.
3352 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3354 EnumLength(edx, ebx);
3355 cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
3356 j(equal, call_runtime);
3361 mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3363 // For all objects but the receiver, check that the cache is empty.
3364 EnumLength(edx, ebx);
3365 cmp(edx, Immediate(Smi::FromInt(0)));
3366 j(not_equal, call_runtime);
3370 // Check that there are no elements. Register rcx contains the current JS
3371 // object we've reached through the prototype chain.
3373 mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
3374 cmp(ecx, isolate()->factory()->empty_fixed_array());
3375 j(equal, &no_elements);
3377 // Second chance, the object may be using the empty slow element dictionary.
3378 cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
3379 j(not_equal, call_runtime);
3382 mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3383 cmp(ecx, isolate()->factory()->null_value());
3384 j(not_equal, &next);
3388 void MacroAssembler::TestJSArrayForAllocationMemento(
3389 Register receiver_reg,
3390 Register scratch_reg,
3391 Label* no_memento_found) {
3392 ExternalReference new_space_start =
3393 ExternalReference::new_space_start(isolate());
3394 ExternalReference new_space_allocation_top =
3395 ExternalReference::new_space_allocation_top_address(isolate());
3397 lea(scratch_reg, Operand(receiver_reg,
3398 JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
3399 cmp(scratch_reg, Immediate(new_space_start));
3400 j(less, no_memento_found);
3401 cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
3402 j(greater, no_memento_found);
3403 cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
3404 Immediate(isolate()->factory()->allocation_memento_map()));
3408 void MacroAssembler::JumpIfDictionaryInPrototypeChain(
3413 ASSERT(!scratch1.is(scratch0));
3414 Factory* factory = isolate()->factory();
3415 Register current = scratch0;
3418 // scratch contained elements pointer.
3419 mov(current, object);
3421 // Loop based on the map going up the prototype chain.
3423 mov(current, FieldOperand(current, HeapObject::kMapOffset));
3424 mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
3425 DecodeField<Map::ElementsKindBits>(scratch1);
3426 cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
3428 mov(current, FieldOperand(current, Map::kPrototypeOffset));
3429 cmp(current, Immediate(factory->null_value()));
3430 j(not_equal, &loop_again);
3434 void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
3435 ASSERT(!dividend.is(eax));
3436 ASSERT(!dividend.is(edx));
3437 MultiplierAndShift ms(divisor);
3438 mov(eax, Immediate(ms.multiplier()));
3440 if (divisor > 0 && ms.multiplier() < 0) add(edx, dividend);
3441 if (divisor < 0 && ms.multiplier() > 0) sub(edx, dividend);
3442 if (ms.shift() > 0) sar(edx, ms.shift());
3449 void MacroAssembler::absps(XMMRegister dst) {
3450 static const struct V8_ALIGNED(16) {
3455 } float_absolute_constant =
3456 { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF };
3458 Operand(reinterpret_cast<int32_t>(&float_absolute_constant),
3459 RelocInfo::NONE32));
3463 void MacroAssembler::abspd(XMMRegister dst) {
3464 static const struct V8_ALIGNED(16) {
3469 } double_absolute_constant =
3470 { 0xFFFFFFFF, 0x7FFFFFFF, 0xFFFFFFFF, 0x7FFFFFFF };
3472 Operand(reinterpret_cast<int32_t>(&double_absolute_constant),
3473 RelocInfo::NONE32));
3477 void MacroAssembler::notps(XMMRegister dst) {
3478 static const struct V8_ALIGNED(16) {
3483 } float_not_constant =
3484 { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
3486 Operand(reinterpret_cast<int32_t>(&float_not_constant),
3487 RelocInfo::NONE32));
3491 void MacroAssembler::negateps(XMMRegister dst) {
3492 static const struct V8_ALIGNED(16) {
3497 } float_negate_constant =
3498 { 0x80000000, 0x80000000, 0x80000000, 0x80000000 };
3500 Operand(reinterpret_cast<int32_t>(&float_negate_constant),
3501 RelocInfo::NONE32));
3505 void MacroAssembler::negatepd(XMMRegister dst) {
3506 static const struct V8_ALIGNED(16) {
3511 } double_negate_constant =
3512 { 0x00000000, 0x80000000, 0x00000000, 0x80000000 };
3514 Operand(reinterpret_cast<int32_t>(&double_negate_constant),
3515 RelocInfo::NONE32));
3519 void MacroAssembler::pnegd(XMMRegister dst) {
3520 static const struct V8_ALIGNED(16) {
3525 } int32_one_constant = { 0x1, 0x1, 0x1, 0x1 };
3528 Operand(reinterpret_cast<int32_t>(&int32_one_constant),
3529 RelocInfo::NONE32));
3533 } } // namespace v8::internal
3535 #endif // V8_TARGET_ARCH_IA32