1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
7 #if V8_TARGET_ARCH_IA32
9 #include "src/base/bits.h"
10 #include "src/base/division-by-constant.h"
11 #include "src/bootstrapper.h"
12 #include "src/codegen.h"
13 #include "src/cpu-profiler.h"
14 #include "src/debug.h"
15 #include "src/isolate-inl.h"
16 #include "src/runtime/runtime.h"
17 #include "src/serialize.h"
22 // -------------------------------------------------------------------------
23 // MacroAssembler implementation.
// Constructor: forwards the code buffer to the Assembler base class and,
// when an isolate is available, caches the undefined value as the code
// object placeholder.
// NOTE(review): this listing is elided (original line numbering skips), so
// the initializer list and body shown here are incomplete — consult the
// full source before editing.
25 MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size)
26 : Assembler(arg_isolate, buffer, size),
27 generating_stub_(false),
29 if (isolate() != NULL) {
30 // TODO(titzer): should we just use a null handle here instead?
31 code_object_ = Handle<Object>(isolate()->heap()->undefined_value(),
// Load: moves a value of the given Representation from memory into dst,
// dispatching on the representation width/signedness. Doubles are not
// supported here (DCHECK). NOTE(review): the mov/movsx/movzx bodies of the
// branches are elided in this excerpt.
37 void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
38 DCHECK(!r.IsDouble());
41 } else if (r.IsUInteger8()) {
43 } else if (r.IsInteger16()) {
45 } else if (r.IsUInteger16()) {
// Store: the mirror of Load — writes src to memory with the width implied
// by the Representation; for full-width stores it additionally asserts the
// value's tag (heap object vs. smi) when emit_debug_code() is on.
// NOTE(review): branch bodies are elided in this excerpt.
53 void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
54 DCHECK(!r.IsDouble());
55 if (r.IsInteger8() || r.IsUInteger8()) {
57 } else if (r.IsInteger16() || r.IsUInteger16()) {
60 if (r.IsHeapObject()) {
62 } else if (r.IsSmi()) {
// LoadRoot: loads the root-list entry at |index| into |destination|.
// Constant roots are embedded directly as a handle; otherwise the value is
// read from the roots array via an external reference, reusing
// |destination| as the index register.
70 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
71 if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
72 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
73 mov(destination, value);
76 ExternalReference roots_array_start =
77 ExternalReference::roots_array_start(isolate());
78 mov(destination, Immediate(index));
79 mov(destination, Operand::StaticArray(destination,
// StoreRoot: writes |source| into the roots array slot at |index|; only
// legal for roots that may be written after heap initialization (DCHECK).
// Clobbers a scratch register (parameter elided in this excerpt).
85 void MacroAssembler::StoreRoot(Register source,
87 Heap::RootListIndex index) {
88 DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
89 ExternalReference roots_array_start =
90 ExternalReference::roots_array_start(isolate());
91 mov(scratch, Immediate(index));
92 mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
// CompareRoot (register + scratch): compares |with| against the root at
// |index| by indexing the roots array through an external reference.
// Clobbers |scratch|.
97 void MacroAssembler::CompareRoot(Register with,
99 Heap::RootListIndex index) {
100 ExternalReference roots_array_start =
101 ExternalReference::roots_array_start(isolate());
102 mov(scratch, Immediate(index));
103 cmp(with, Operand::StaticArray(scratch,
// CompareRoot (register, constant root): only valid for roots that can be
// embedded as constants (DCHECK); compares against the handle directly.
// NOTE(review): the cmp emission is elided in this excerpt.
109 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
110 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
111 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
// CompareRoot (memory operand, constant root): same as above but compares a
// memory operand. NOTE(review): the cmp emission is elided in this excerpt.
116 void MacroAssembler::CompareRoot(const Operand& with,
117 Heap::RootListIndex index) {
118 DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
119 Handle<Object> value(&isolate()->heap()->roots_array_start()[index]);
// InNewSpace: jumps to |condition_met| depending on whether |object| lies in
// new space. Masks the object address down to its page header, then tests
// the page's IN_FROM_SPACE/IN_TO_SPACE flag bits with a single byte test.
// |cc| must be equal or not_equal (DCHECK). Clobbers |scratch|.
124 void MacroAssembler::InNewSpace(
128 Label* condition_met,
129 Label::Distance condition_met_distance) {
130 DCHECK(cc == equal || cc == not_equal);
131 if (scratch.is(object)) {
// scratch aliases object: mask in place.
132 and_(scratch, Immediate(~Page::kPageAlignmentMask));
// Otherwise build the mask in scratch first, then AND with the object.
134 mov(scratch, Immediate(~Page::kPageAlignmentMask));
135 and_(scratch, object);
137 // Check that we can use a test_b.
138 DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
139 DCHECK(MemoryChunk::IN_TO_SPACE < 8);
140 int mask = (1 << MemoryChunk::IN_FROM_SPACE)
141 | (1 << MemoryChunk::IN_TO_SPACE);
142 // If non-zero, the page belongs to new-space.
143 test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
144 static_cast<uint8_t>(mask));
145 j(cc, condition_met, condition_met_distance);
// RememberedSetHelper: records |addr| in the store buffer. In debug code,
// first asserts the object is NOT in new space. After appending the slot it
// tests the buffer-overflow bit; on overflow it calls
// StoreBufferOverflowStub, then either returns (kReturnAtEnd) or falls
// through (kFallThroughAtEnd). NOTE(review): several lines (ok/done label
// binds, ret) are elided in this excerpt.
149 void MacroAssembler::RememberedSetHelper(
150 Register object, // Only used for debug checks.
153 SaveFPRegsMode save_fp,
154 MacroAssembler::RememberedSetFinalAction and_then) {
156 if (emit_debug_code()) {
158 JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
162 // Load store buffer top.
163 ExternalReference store_buffer =
164 ExternalReference::store_buffer_top(isolate());
165 mov(scratch, Operand::StaticVariable(store_buffer))
166 // Store pointer to buffer.
167 mov(Operand(scratch, 0), addr);
168 // Increment buffer top.
169 add(scratch, Immediate(kPointerSize));
170 // Write back new top of buffer.
171 mov(Operand::StaticVariable(store_buffer), scratch);
172 // Call stub on end of buffer.
173 // Check for end of buffer.
174 test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
175 if (and_then == kReturnAtEnd) {
176 Label buffer_overflowed;
177 j(not_equal, &buffer_overflowed, Label::kNear);
179 bind(&buffer_overflowed);
181 DCHECK(and_then == kFallThroughAtEnd);
182 j(equal, &done, Label::kNear);
184 StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
185 CallStub(&store_buffer_overflow);
186 if (and_then == kReturnAtEnd) {
189 DCHECK(and_then == kFallThroughAtEnd);
// ClampDoubleToUint8: converts the double in |input_reg| to an integer and
// clamps it to [0, 255]. Fast path: cvtsd2si then check the high bits are
// zero. Out-of-range values are saturated via the sign trick (setcc/sub/and)
// and conversion failure (0x80000000 sentinel, detected via overflow on
// cmp 0x1) falls back to a ucomisd comparison against zero.
// NOTE(review): label declarations and the conv_failure bind are elided in
// this excerpt.
195 void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
196 XMMRegister scratch_reg,
197 Register result_reg) {
200 xorps(scratch_reg, scratch_reg);
201 cvtsd2si(result_reg, input_reg);
202 test(result_reg, Immediate(0xFFFFFF00));
203 j(zero, &done, Label::kNear);
204 cmp(result_reg, Immediate(0x1));
205 j(overflow, &conv_failure, Label::kNear);
206 mov(result_reg, Immediate(0));
207 setcc(sign, result_reg);
208 sub(result_reg, Immediate(1));
209 and_(result_reg, Immediate(255));
210 jmp(&done, Label::kNear);
212 Move(result_reg, Immediate(0));
213 ucomisd(input_reg, scratch_reg);
214 j(below, &done, Label::kNear);
215 Move(result_reg, Immediate(255));
// ClampUint8: clamps an integer register to [0, 255] using a byte-width
// sign trick: setcc(negative) gives 1 for negatives, dec_b turns that into
// 0 (negative) or 255 (positive overflow).
220 void MacroAssembler::ClampUint8(Register reg) {
222 test(reg, Immediate(0xFFFFFF00));
223 j(zero, &done, Label::kNear);
224 setcc(negative, reg); // 1 if negative, 0 if positive.
225 dec_b(reg); // 0 if negative, 255 if positive.
// SlowTruncateToI: out-of-line truncation via DoubleToIStub; used when the
// inline cvttsd2si path overflows. Parameters (input register, offset) are
// partially elided in this excerpt.
230 void MacroAssembler::SlowTruncateToI(Register result_reg,
233 DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
234 call(stub.GetCode(), RelocInfo::CODE_TARGET);
// TruncateDoubleToI: truncates the double in |input_reg| to int32. The fast
// path is cvttsd2si; cmp result 0x1 with no_overflow detects the
// 0x80000000 failure sentinel. On failure the double is spilled to the
// stack and SlowTruncateToI is invoked. NOTE(review): the done label bind
// is elided in this excerpt.
238 void MacroAssembler::TruncateDoubleToI(Register result_reg,
239 XMMRegister input_reg) {
241 cvttsd2si(result_reg, Operand(input_reg));
242 cmp(result_reg, 0x1);
243 j(no_overflow, &done, Label::kNear);
245 sub(esp, Immediate(kDoubleSize));
246 movsd(MemOperand(esp, 0), input_reg);
247 SlowTruncateToI(result_reg, esp, 0);
248 add(esp, Immediate(kDoubleSize));
// DoubleToI: converts |input_reg| to int32 with exactness checks. Round-trips
// through cvttsd2si/Cvtsi2sd and compares with ucomisd: inequality jumps to
// |lost_precision|, NaN (parity_even) jumps to |is_nan|. When
// FAIL_ON_MINUS_ZERO is requested, a zero result additionally checks the
// sign bit via movmskpd and jumps to |minus_zero| for -0.0.
253 void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
255 MinusZeroMode minus_zero_mode,
256 Label* lost_precision, Label* is_nan,
257 Label* minus_zero, Label::Distance dst) {
258 DCHECK(!input_reg.is(scratch));
259 cvttsd2si(result_reg, Operand(input_reg));
260 Cvtsi2sd(scratch, Operand(result_reg));
261 ucomisd(scratch, input_reg);
262 j(not_equal, lost_precision, dst);
263 j(parity_even, is_nan, dst);
264 if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
266 // The integer converted back is equal to the original. We
267 // only have to test if we got -0 as an input.
268 test(result_reg, Operand(result_reg));
269 j(not_zero, &done, Label::kNear);
270 movmskpd(result_reg, input_reg);
271 // Bit 0 contains the sign of the double in input_reg.
272 // If input was positive, we are ok and return 0, otherwise
273 // jump to minus_zero.
275 j(not_zero, minus_zero, dst);
// TruncateHeapNumberToI: truncates the HeapNumber in |input_reg| to int32.
// With SSE3 it uses x87 fisttp_d after an exponent range check (exponents
// >= bias+63 go to the slow case). Without SSE3 it uses cvttsd2si with the
// kMinInt/NaN checks and falls back to SlowTruncateToI; when |input_reg|
// aliases |result_reg| the number is first spilled to the stack so the stub
// can still read it. NOTE(review): several else branches and label binds
// are elided in this excerpt.
281 void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
282 Register input_reg) {
283 Label done, slow_case;
285 if (CpuFeatures::IsSupported(SSE3)) {
286 CpuFeatureScope scope(this, SSE3);
288 // Use more powerful conversion when sse3 is available.
289 // Load x87 register with heap number.
290 fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
291 // Get exponent alone and check for too-big exponent.
292 mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
293 and_(result_reg, HeapNumber::kExponentMask);
294 const uint32_t kTooBigExponent =
295 (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
296 cmp(Operand(result_reg), Immediate(kTooBigExponent));
297 j(greater_equal, &slow_case, Label::kNear);
299 // Reserve space for 64 bit answer.
300 sub(Operand(esp), Immediate(kDoubleSize));
301 // Do conversion, which cannot fail because we checked the exponent.
302 fisttp_d(Operand(esp, 0));
303 mov(result_reg, Operand(esp, 0)); // Low word of answer is the result.
304 add(Operand(esp), Immediate(kDoubleSize));
305 jmp(&done, Label::kNear);
309 if (input_reg.is(result_reg)) {
310 // Input is clobbered. Restore number from fpu stack
311 sub(Operand(esp), Immediate(kDoubleSize));
312 fstp_d(Operand(esp, 0));
313 SlowTruncateToI(result_reg, esp, 0);
314 add(esp, Immediate(kDoubleSize));
317 SlowTruncateToI(result_reg, input_reg);
320 movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
321 cvttsd2si(result_reg, Operand(xmm0));
322 cmp(result_reg, 0x1);
323 j(no_overflow, &done, Label::kNear);
324 // Check if the input was 0x8000000 (kMinInt).
325 // If no, then we got an overflow and we deoptimize.
326 ExternalReference min_int = ExternalReference::address_of_min_int();
327 ucomisd(xmm0, Operand::StaticVariable(min_int));
328 j(not_equal, &slow_case, Label::kNear);
329 j(parity_even, &slow_case, Label::kNear); // NaN.
330 jmp(&done, Label::kNear);
334 if (input_reg.is(result_reg)) {
335 // Input is clobbered. Restore number from double scratch.
336 sub(esp, Immediate(kDoubleSize));
337 movsd(MemOperand(esp, 0), xmm0);
338 SlowTruncateToI(result_reg, esp, 0);
339 add(esp, Immediate(kDoubleSize));
341 SlowTruncateToI(result_reg, input_reg);
// LoadUint32: converts an unsigned 32-bit integer in |src| to a double in
// |dst|. Values with the sign bit set (>= 2^31) get 2^32 added via the
// uint32 bias constant to correct the signed cvt result. NOTE(review): the
// Cvtsi2sd call and done label bind are elided in this excerpt.
348 void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
350 cmp(src, Immediate(0));
351 ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
353 j(not_sign, &done, Label::kNear);
354 addsd(dst, Operand::StaticVariable(uint32_bias));
// RecordWriteArray: write barrier for a store of |value| into a FixedArray
// element at smi |index|. Optionally skips the barrier for smi values
// (INLINE_SMI_CHECK), computes the slot address into |index| (clobbering
// it), then delegates to RecordWrite. In debug code the clobbered registers
// are zapped afterwards so stale uses fail loudly. NOTE(review): the smi
// jump target and done label are elided in this excerpt.
359 void MacroAssembler::RecordWriteArray(
363 SaveFPRegsMode save_fp,
364 RememberedSetAction remembered_set_action,
366 PointersToHereCheck pointers_to_here_check_for_value) {
367 // First, check if a write barrier is even needed. The tests below
368 // catch stores of Smis.
371 // Skip barrier if writing a smi.
372 if (smi_check == INLINE_SMI_CHECK) {
373 DCHECK_EQ(0, kSmiTag);
374 test(value, Immediate(kSmiTagMask));
378 // Array access: calculate the destination address in the same manner as
379 // KeyedStoreIC::GenerateGeneric. Multiply a smi by 2 to get an offset
380 // into an array of words.
381 Register dst = index;
382 lea(dst, Operand(object, index, times_half_pointer_size,
383 FixedArray::kHeaderSize - kHeapObjectTag));
385 RecordWrite(object, dst, value, save_fp, remembered_set_action,
386 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
390 // Clobber clobbered input registers when running with the debug-code flag
391 // turned on to provoke errors.
392 if (emit_debug_code()) {
393 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
394 mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
// RecordWriteField: write barrier for a store of |value| into the field at
// |offset| of |object|. Optionally skips the barrier for smis, asserts the
// offset is pointer-aligned (plus a runtime alignment check in debug code),
// computes the slot address into |dst|, and delegates to RecordWrite.
// Debug code zaps the clobbered registers afterwards. NOTE(review): some
// label binds and the int3 on misalignment are elided in this excerpt.
399 void MacroAssembler::RecordWriteField(
404 SaveFPRegsMode save_fp,
405 RememberedSetAction remembered_set_action,
407 PointersToHereCheck pointers_to_here_check_for_value) {
408 // First, check if a write barrier is even needed. The tests below
409 // catch stores of Smis.
412 // Skip barrier if writing a smi.
413 if (smi_check == INLINE_SMI_CHECK) {
414 JumpIfSmi(value, &done, Label::kNear);
417 // Although the object register is tagged, the offset is relative to the start
418 // of the object, so so offset must be a multiple of kPointerSize.
419 DCHECK(IsAligned(offset, kPointerSize));
421 lea(dst, FieldOperand(object, offset));
422 if (emit_debug_code()) {
424 test_b(dst, (1 << kPointerSizeLog2) - 1);
425 j(zero, &ok, Label::kNear);
430 RecordWrite(object, dst, value, save_fp, remembered_set_action,
431 OMIT_SMI_CHECK, pointers_to_here_check_for_value);
435 // Clobber clobbered input registers when running with the debug-code flag
436 // turned on to provoke errors.
437 if (emit_debug_code()) {
438 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
439 mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
// RecordWriteForMap: write barrier specialized for storing a new map into
// |object|'s map slot. Skips entirely when incremental marking is off;
// otherwise checks only the map page's pointers-to-here flag (maps are
// never in new space, so one check suffices) before calling
// RecordWriteStub. Uses |scratch1| as the slot address and |scratch2| as
// the value register; debug code zaps all clobbered registers afterwards.
// NOTE(review): the actual map store, some label binds, and the stub-call
// CheckPageFlag tail are elided in this excerpt.
444 void MacroAssembler::RecordWriteForMap(
449 SaveFPRegsMode save_fp) {
452 Register address = scratch1;
453 Register value = scratch2;
454 if (emit_debug_code()) {
456 lea(address, FieldOperand(object, HeapObject::kMapOffset));
457 test_b(address, (1 << kPointerSizeLog2) - 1);
458 j(zero, &ok, Label::kNear);
463 DCHECK(!object.is(value));
464 DCHECK(!object.is(address));
465 DCHECK(!value.is(address));
466 AssertNotSmi(object);
468 if (!FLAG_incremental_marking) {
472 // Compute the address.
473 lea(address, FieldOperand(object, HeapObject::kMapOffset));
475 // A single check of the map's pages interesting flag suffices, since it is
476 // only set during incremental collection, and then it's also guaranteed that
477 // the from object's page's interesting flag is also set. This optimization
478 // relies on the fact that maps can never be in new space.
479 DCHECK(!isolate()->heap()->InNewSpace(*map));
480 CheckPageFlagForMap(map,
481 MemoryChunk::kPointersToHereAreInterestingMask,
486 RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
492 // Count number of write barriers in generated code.
493 isolate()->counters()->write_barriers_static()->Increment();
494 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
496 // Clobber clobbered input registers when running with the debug-code flag
497 // turned on to provoke errors.
498 if (emit_debug_code()) {
499 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
500 mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
501 mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
// RecordWrite: the general write barrier. |address| is the slot that was
// written, |value| the stored pointer. Skips work when both the remembered
// set update is omitted and incremental marking is off. Debug code verifies
// the slot really contains |value|. Smi stores can be skipped inline; page
// flag checks on value and object filter out uninteresting stores before
// calling RecordWriteStub. Clobbers |address| and |value| (zapped in debug
// code). NOTE(review): label binds and the stub CallStub tail are elided in
// this excerpt.
506 void MacroAssembler::RecordWrite(
510 SaveFPRegsMode fp_mode,
511 RememberedSetAction remembered_set_action,
513 PointersToHereCheck pointers_to_here_check_for_value) {
514 DCHECK(!object.is(value));
515 DCHECK(!object.is(address));
516 DCHECK(!value.is(address));
517 AssertNotSmi(object);
519 if (remembered_set_action == OMIT_REMEMBERED_SET &&
520 !FLAG_incremental_marking) {
524 if (emit_debug_code()) {
526 cmp(value, Operand(address, 0));
527 j(equal, &ok, Label::kNear);
532 // First, check if a write barrier is even needed. The tests below
533 // catch stores of Smis and stores into young gen.
536 if (smi_check == INLINE_SMI_CHECK) {
537 // Skip barrier if writing a smi.
538 JumpIfSmi(value, &done, Label::kNear);
541 if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
543 value, // Used as scratch.
544 MemoryChunk::kPointersToHereAreInterestingMask,
549 CheckPageFlag(object,
550 value, // Used as scratch.
551 MemoryChunk::kPointersFromHereAreInterestingMask,
556 RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
562 // Count number of write barriers in generated code.
563 isolate()->counters()->write_barriers_static()->Increment();
564 IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
566 // Clobber clobbered registers when running with the debug-code flag
567 // turned on to provoke errors.
568 if (emit_debug_code()) {
569 mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
570 mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
// DebugBreak: calls the kDebugBreak runtime entry through CEntryStub with
// zero arguments (eax = argc, ebx = runtime function address).
575 void MacroAssembler::DebugBreak() {
576 Move(eax, Immediate(0));
577 mov(ebx, Immediate(ExternalReference(Runtime::kDebugBreak, isolate())));
578 CEntryStub ces(isolate(), 1);
579 call(ces.GetCode(), RelocInfo::DEBUG_BREAK);
// Cvtsi2sd: int32 -> double conversion helper. NOTE(review): the body is
// elided in this excerpt (typically xorps + cvtsi2sd to break the false
// dependency — confirm against full source).
583 void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
// IsUnsafeImmediate: an immediate is "unsafe" for JIT spraying purposes if
// it has no reloc info and does not fit in 17 signed bits; such immediates
// get XOR-scrambled with the jit cookie by SafeMove/SafePush.
589 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
590 static const int kMaxImmediateBits = 17;
591 if (!RelocInfo::IsNone(x.rmode_)) return false;
592 return !is_intn(x.x_, kMaxImmediateBits);
// SafeMove: like Move, but scrambles unsafe immediates with the jit cookie
// and un-scrambles in a second instruction, so the raw constant never
// appears in the instruction stream. NOTE(review): the else branch is
// elided in this excerpt.
596 void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
597 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
598 Move(dst, Immediate(x.x_ ^ jit_cookie()));
599 xor_(dst, jit_cookie());
// SafePush: push variant of SafeMove — pushes the cookie-scrambled value,
// then XORs the stack slot in place to restore the real constant.
// NOTE(review): the else branch is elided in this excerpt.
606 void MacroAssembler::SafePush(const Immediate& x) {
607 if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
608 push(Immediate(x.x_ ^ jit_cookie()));
609 xor_(Operand(esp, 0), Immediate(jit_cookie()));
// CmpObjectType: loads |heap_object|'s map into |map| and compares its
// instance type against |type| (flags set for a subsequent conditional
// jump).
616 void MacroAssembler::CmpObjectType(Register heap_object,
619 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
620 CmpInstanceType(map, type);
// CmpInstanceType: byte-compares the instance type stored in |map| against
// |type|.
624 void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
625 cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
626 static_cast<int8_t>(type));
// CheckFastElements: jumps to |fail| unless the map's elements kind is one
// of the four fast kinds (smi/holey-smi/object/holey-object), relying on
// their contiguous 0..3 encoding in bit field 2.
630 void MacroAssembler::CheckFastElements(Register map,
632 Label::Distance distance) {
633 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
634 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
635 STATIC_ASSERT(FAST_ELEMENTS == 2);
636 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
637 cmpb(FieldOperand(map, Map::kBitField2Offset),
638 Map::kMaximumBitField2FastHoleyElementValue);
639 j(above, fail, distance);
// CheckFastObjectElements: jumps to |fail| unless the elements kind is a
// fast *object* kind (2 or 3) — i.e. both smi kinds (<= holey-smi max) and
// anything above holey-object fail.
643 void MacroAssembler::CheckFastObjectElements(Register map,
645 Label::Distance distance) {
646 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
647 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
648 STATIC_ASSERT(FAST_ELEMENTS == 2);
649 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
650 cmpb(FieldOperand(map, Map::kBitField2Offset),
651 Map::kMaximumBitField2FastHoleySmiElementValue);
652 j(below_equal, fail, distance);
653 cmpb(FieldOperand(map, Map::kBitField2Offset),
654 Map::kMaximumBitField2FastHoleyElementValue);
655 j(above, fail, distance);
// CheckFastSmiElements: jumps to |fail| unless the elements kind is a fast
// smi kind (0 or 1).
659 void MacroAssembler::CheckFastSmiElements(Register map,
661 Label::Distance distance) {
662 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
663 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
664 cmpb(FieldOperand(map, Map::kBitField2Offset),
665 Map::kMaximumBitField2FastHoleySmiElementValue);
666 j(above, fail, distance);
// StoreNumberToDoubleElements: stores |maybe_number| (a smi or HeapNumber)
// into a FixedDoubleArray slot at smi |key|, canonicalizing NaNs so the
// hole representation stays unambiguous. Smis take the convert-and-store
// path; HeapNumbers have their upper word compared against the NaN/Infinity
// boundary, and actual NaNs are replaced with the canonical non-hole NaN.
// NOTE(review): the CheckMap fail target, some label binds, and parts of
// the NaN branch are elided in this excerpt.
670 void MacroAssembler::StoreNumberToDoubleElements(
671 Register maybe_number,
675 XMMRegister scratch2,
677 int elements_offset) {
678 Label smi_value, done, maybe_nan, not_nan, is_nan, have_double_value;
679 JumpIfSmi(maybe_number, &smi_value, Label::kNear);
681 CheckMap(maybe_number,
682 isolate()->factory()->heap_number_map(),
686 // Double value, canonicalize NaN.
687 uint32_t offset = HeapNumber::kValueOffset + sizeof(kHoleNanLower32);
688 cmp(FieldOperand(maybe_number, offset),
689 Immediate(kNaNOrInfinityLowerBoundUpper32));
690 j(greater_equal, &maybe_nan, Label::kNear);
693 ExternalReference canonical_nan_reference =
694 ExternalReference::address_of_canonical_non_hole_nan();
695 movsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
696 bind(&have_double_value);
697 movsd(FieldOperand(elements, key, times_4,
698 FixedDoubleArray::kHeaderSize - elements_offset),
703 // Could be NaN or Infinity. If fraction is not zero, it's NaN, otherwise
704 // it's an Infinity, and the non-NaN code path applies.
705 j(greater, &is_nan, Label::kNear);
706 cmp(FieldOperand(maybe_number, HeapNumber::kValueOffset), Immediate(0));
709 movsd(scratch2, Operand::StaticVariable(canonical_nan_reference));
710 jmp(&have_double_value, Label::kNear);
713 // Value is a smi. Convert to a double and store.
714 // Preserve original value.
715 mov(scratch1, maybe_number);
717 Cvtsi2sd(scratch2, scratch1);
718 movsd(FieldOperand(elements, key, times_4,
719 FixedDoubleArray::kHeaderSize - elements_offset),
// CompareMap: compares |obj|'s map word against the handle |map| (flags set
// for a subsequent conditional jump).
725 void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
726 cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
// CheckMap: optionally rejects smis, then compares the map and (in the
// elided tail) jumps to |fail| on mismatch.
730 void MacroAssembler::CheckMap(Register obj,
733 SmiCheckType smi_check_type) {
734 if (smi_check_type == DO_SMI_CHECK) {
735 JumpIfSmi(obj, fail);
738 CompareMap(obj, map);
// DispatchMap: like CheckMap but on a map match jumps to the |success| code
// object. NOTE(review): the success jump and fail bind are elided in this
// excerpt.
743 void MacroAssembler::DispatchMap(Register obj,
746 Handle<Code> success,
747 SmiCheckType smi_check_type) {
749 if (smi_check_type == DO_SMI_CHECK) {
750 JumpIfSmi(obj, &fail);
752 cmp(FieldOperand(obj, HeapObject::kMapOffset), Immediate(map));
// IsObjectStringType: loads the map and instance type of |heap_object| and
// tests the not-string mask; the returned Condition (elided here) holds
// when the object is a string.
759 Condition MacroAssembler::IsObjectStringType(Register heap_object,
761 Register instance_type) {
762 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
763 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
764 STATIC_ASSERT(kNotStringTag != 0);
765 test(instance_type, Immediate(kIsNotStringMask));
// IsObjectNameType: same shape as above but compares against LAST_NAME_TYPE
// (strings and symbols are Names).
770 Condition MacroAssembler::IsObjectNameType(Register heap_object,
772 Register instance_type) {
773 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
774 movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
775 cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
// IsObjectJSObjectType: loads the map and delegates to
// IsInstanceJSObjectType, which jumps to |fail| if outside the
// non-callable spec-object range.
780 void MacroAssembler::IsObjectJSObjectType(Register heap_object,
784 mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
785 IsInstanceJSObjectType(map, scratch, fail);
// IsInstanceJSObjectType: range check on the instance type via the
// subtract-then-unsigned-compare idiom. NOTE(review): the cmp/j tail is
// elided in this excerpt.
789 void MacroAssembler::IsInstanceJSObjectType(Register map,
792 movzx_b(scratch, FieldOperand(map, Map::kInstanceTypeOffset));
793 sub(scratch, Immediate(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
795 LAST_NONCALLABLE_SPEC_OBJECT_TYPE - FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
// FCmp: x87 floating-point compare helper. NOTE(review): body elided in
// this excerpt.
800 void MacroAssembler::FCmp() {
// AssertNumber: debug-only check that |object| is a smi or a HeapNumber;
// aborts with kOperandNotANumber otherwise. No-op in release code.
806 void MacroAssembler::AssertNumber(Register object) {
807 if (emit_debug_code()) {
809 JumpIfSmi(object, &ok);
810 cmp(FieldOperand(object, HeapObject::kMapOffset),
811 isolate()->factory()->heap_number_map());
812 Check(equal, kOperandNotANumber);
// AssertSmi: debug-only check that |object| carries the smi tag.
818 void MacroAssembler::AssertSmi(Register object) {
819 if (emit_debug_code()) {
820 test(object, Immediate(kSmiTagMask));
821 Check(equal, kOperandIsNotASmi);
// AssertString: debug-only check that |object| is a non-smi with instance
// type below FIRST_NONSTRING_TYPE. Note it clobbers and (in elided lines)
// restores |object| around the map load.
826 void MacroAssembler::AssertString(Register object) {
827 if (emit_debug_code()) {
828 test(object, Immediate(kSmiTagMask));
829 Check(not_equal, kOperandIsASmiAndNotAString);
831 mov(object, FieldOperand(object, HeapObject::kMapOffset));
832 CmpInstanceType(object, FIRST_NONSTRING_TYPE);
834 Check(below, kOperandIsNotAString);
// AssertName: debug-only check that |object| is a Name (instance type
// <= LAST_NAME_TYPE); same clobber/restore pattern as AssertString.
839 void MacroAssembler::AssertName(Register object) {
840 if (emit_debug_code()) {
841 test(object, Immediate(kSmiTagMask));
842 Check(not_equal, kOperandIsASmiAndNotAName);
844 mov(object, FieldOperand(object, HeapObject::kMapOffset));
845 CmpInstanceType(object, LAST_NAME_TYPE);
847 Check(below_equal, kOperandIsNotAName);
// AssertUndefinedOrAllocationSite: debug-only check that |object| is either
// the undefined value or has the allocation-site map.
852 void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
853 if (emit_debug_code()) {
855 AssertNotSmi(object);
856 cmp(object, isolate()->factory()->undefined_value());
857 j(equal, &done_checking);
858 cmp(FieldOperand(object, 0),
859 Immediate(isolate()->factory()->allocation_site_map()));
860 Assert(equal, kExpectedUndefinedOrCell);
861 bind(&done_checking);
// AssertNotSmi: debug-only check that |object| is NOT a smi.
866 void MacroAssembler::AssertNotSmi(Register object) {
867 if (emit_debug_code()) {
868 test(object, Immediate(kSmiTagMask));
869 Check(not_equal, kOperandIsASmi);
// StubPrologue: builds a STUB frame — caller fp, context, and the STUB
// frame-type marker. NOTE(review): the mov(ebp, esp) between pushes is
// elided in this excerpt.
874 void MacroAssembler::StubPrologue() {
875 push(ebp); // Caller's frame pointer.
877 push(esi); // Callee's context.
878 push(Immediate(Smi::FromInt(StackFrame::STUB)));
// Prologue: JS function prologue. When |code_pre_aging| is set, emits the
// code-age sequence (a call to MarkCodeAsExecutedOnce padded to the fixed
// sequence length) instead of the normal frame setup; the whole thing sits
// inside a PredictableCodeSizeScope so the patcher can find it.
882 void MacroAssembler::Prologue(bool code_pre_aging) {
883 PredictableCodeSizeScope predictible_code_size_scope(this,
884 kNoCodeAgeSequenceLength);
885 if (code_pre_aging) {
887 call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
888 RelocInfo::CODE_AGE_SEQUENCE);
889 Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
891 push(ebp); // Caller's frame pointer.
893 push(esi); // Callee's context.
894 push(edi); // Callee's JS function.
// EnterFrame (two-arg overload): ia32 has no out-of-line constant pool, so
// this simply forwards to the one-arg version (forwarding call elided in
// this excerpt).
899 void MacroAssembler::EnterFrame(StackFrame::Type type,
900 bool load_constant_pool_pointer_reg) {
901 // Out-of-line constant pool not implemented on ia32.
// EnterFrame: pushes the frame marker and code object after the standard
// fp/context setup (elided here); debug code verifies the code-object slot
// was patched away from undefined.
906 void MacroAssembler::EnterFrame(StackFrame::Type type) {
910 push(Immediate(Smi::FromInt(type)));
911 push(Immediate(CodeObject()));
912 if (emit_debug_code()) {
913 cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
914 Check(not_equal, kCodeObjectNotProperlyPatched);
// LeaveFrame: in debug code verifies the frame marker matches |type|, then
// (in elided lines) tears the frame down via leave().
919 void MacroAssembler::LeaveFrame(StackFrame::Type type) {
920 if (emit_debug_code()) {
921 cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
922 Immediate(Smi::FromInt(type)));
923 Check(equal, kStackFrameTypesMustMatch);
// EnterExitFramePrologue: lays out the fixed part of an exit frame (saved
// entry sp slot + code object) and publishes ebp/esi/ebx into the isolate's
// c_entry_fp/context/c_function top-of-stack slots. NOTE(review): the
// push(ebp)/mov(ebp, esp) frame setup lines are elided in this excerpt.
929 void MacroAssembler::EnterExitFramePrologue() {
930 // Set up the frame structure on the stack.
931 DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
932 DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
933 DCHECK(ExitFrameConstants::kCallerFPOffset == 0 * kPointerSize);
937 // Reserve room for entry stack pointer and push the code object.
938 DCHECK(ExitFrameConstants::kSPOffset == -1 * kPointerSize);
939 push(Immediate(0)); // Saved entry sp, patched before call.
940 push(Immediate(CodeObject())); // Accessed from ExitFrame::code_slot.
942 // Save the frame pointer and the context in top.
943 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
944 ExternalReference context_address(Isolate::kContextAddress, isolate());
945 ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
946 mov(Operand::StaticVariable(c_entry_fp_address), ebp);
947 mov(Operand::StaticVariable(context_address), esi);
948 mov(Operand::StaticVariable(c_function_address), ebx);
// EnterExitFrameEpilogue: optionally spills all XMM registers below the
// frame (save_doubles branch; condition elided in this excerpt), reserves
// |argc| argument slots, aligns esp to the OS frame alignment, and patches
// the saved entry sp slot.
952 void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
953 // Optionally save all XMM registers.
955 int space = XMMRegister::kMaxNumRegisters * kSIMD128Size +
957 sub(esp, Immediate(space));
958 const int offset = -2 * kPointerSize;
959 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
960 XMMRegister reg = XMMRegister::from_code(i);
961 movups(Operand(ebp, offset - ((i + 1) * kSIMD128Size)), reg);
964 sub(esp, Immediate(argc * kPointerSize));
967 // Get the required frame alignment for the OS.
968 const int kFrameAlignment = base::OS::ActivationFrameAlignment();
969 if (kFrameAlignment > 0) {
970 DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
971 and_(esp, -kFrameAlignment);
974 // Patch the saved entry sp.
975 mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
// EnterExitFrame: full exit-frame setup for runtime calls — prologue, then
// argc/argv in callee-saved registers (edi/esi; the edi setup is elided in
// this excerpt), then the epilogue reserving 3 slots (argc, argv, isolate).
979 void MacroAssembler::EnterExitFrame(bool save_doubles) {
980 EnterExitFramePrologue();
982 // Set up argc and argv in callee-saved registers.
983 int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
985 lea(esi, Operand(ebp, eax, times_4, offset));
987 // Reserve space for argc, argv and isolate.
988 EnterExitFrameEpilogue(3, save_doubles);
// EnterApiExitFrame: exit frame for API callbacks — no double saving, |argc|
// caller-specified slots.
992 void MacroAssembler::EnterApiExitFrame(int argc) {
993 EnterExitFramePrologue();
994 EnterExitFrameEpilogue(argc, false);
// LeaveExitFrame: undoes EnterExitFrame — optionally restores the spilled
// XMM registers (save_doubles condition elided in this excerpt), recovers
// the return address and caller fp, pops arguments+receiver via esi, pushes
// the return address back, and runs the epilogue with context restore.
998 void MacroAssembler::LeaveExitFrame(bool save_doubles) {
999 // Optionally restore all XMM registers.
1001 const int offset = -2 * kPointerSize;
1002 for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
1003 XMMRegister reg = XMMRegister::from_code(i);
1004 movups(reg, Operand(ebp, offset - ((i + 1) * kSIMD128Size)));
1008 // Get the return address from the stack and restore the frame pointer.
1009 mov(ecx, Operand(ebp, 1 * kPointerSize));
1010 mov(ebp, Operand(ebp, 0 * kPointerSize));
1012 // Pop the arguments and the receiver from the caller stack.
1013 lea(esp, Operand(esi, 1 * kPointerSize));
1015 // Push the return address to get ready to return.
1018 LeaveExitFrameEpilogue(true);
// LeaveExitFrameEpilogue: restores esi from the isolate's context slot when
// requested, zeroes the context slot in debug mode, and clears the
// c_entry_fp top-of-stack slot.
1022 void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
1023 // Restore current context from top and clear it in debug mode.
1024 ExternalReference context_address(Isolate::kContextAddress, isolate());
1025 if (restore_context) {
1026 mov(esi, Operand::StaticVariable(context_address));
1029 mov(Operand::StaticVariable(context_address), Immediate(0));
1032 // Clear the top frame.
1033 ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
1035 mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
// LeaveApiExitFrame: API-callback variant; tears down the frame (elided
// here) and runs the epilogue with caller-controlled context restore.
1039 void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
1043 LeaveExitFrameEpilogue(restore_context);
// PushTryHandler: builds a 5-word stack handler (next, code, state,
// context, fp — layout pinned by the STATIC_ASSERTs) and links it as the
// new top handler in the isolate. JS_ENTRY handlers store a NULL fp and
// no-context sentinel so Throw can detect them. NOTE(review): the non-entry
// branch pushing ebp/esi and the code-object push are elided in this
// excerpt.
1047 void MacroAssembler::PushTryHandler(StackHandler::Kind kind,
1048 int handler_index) {
1049 // Adjust this code if not the case.
1050 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1051 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1052 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1053 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1054 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1055 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1057 // We will build up the handler from the bottom by pushing on the stack.
1058 // First push the frame pointer and context.
1059 if (kind == StackHandler::JS_ENTRY) {
1060 // The frame pointer does not point to a JS frame so we save NULL for
1061 // ebp. We expect the code throwing an exception to check ebp before
1062 // dereferencing it to restore the context.
1063 push(Immediate(0)); // NULL frame pointer.
1064 push(Immediate(Smi::FromInt(0))); // No context.
1069 // Push the state and the code object.
1071 StackHandler::IndexField::encode(handler_index) |
1072 StackHandler::KindField::encode(kind);
1073 push(Immediate(state));
1076 // Link the current handler as the next handler.
1077 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1078 push(Operand::StaticVariable(handler_address));
1079 // Set this new handler as the current one.
1080 mov(Operand::StaticVariable(handler_address), esp);
// PopTryHandler: unlinks the top handler (pops the next-handler word back
// into the isolate slot) and drops the remaining handler words.
1084 void MacroAssembler::PopTryHandler() {
1085 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1086 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1087 pop(Operand::StaticVariable(handler_address));
1088 add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
// JumpToHandlerEntry: given eax=exception, edi=code object, edx=state,
// looks up the smi-tagged handler offset in the code's handler table and
// jumps to code start + offset. NOTE(review): the smi-untag and final jmp
// are elided in this excerpt.
1092 void MacroAssembler::JumpToHandlerEntry() {
1093 // Compute the handler entry address and jump to it. The handler table is
1094 // a fixed array of (smi-tagged) code offsets.
1095 // eax = exception, edi = code object, edx = state.
1096 mov(ebx, FieldOperand(edi, Code::kHandlerTableOffset));
1097 shr(edx, StackHandler::kKindWidth);
1098 mov(edx, FieldOperand(ebx, edx, times_4, FixedArray::kHeaderSize));
1100 lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
// Throw: transfers control to the current (top) stack handler. Moves the
// exception into eax, unwinds esp to the handler, relinks the next handler,
// pops code object/state/context/fp, restores the frame's context slot only
// for JS frames (ebp == 0 marks an entry handler), then jumps to the
// handler entry. NOTE(review): the mov into eax, the ebp test, and the skip
// label bind are elided in this excerpt.
1105 void MacroAssembler::Throw(Register value) {
1106 // Adjust this code if not the case.
1107 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1108 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1109 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1110 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1111 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1112 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1114 // The exception is expected in eax.
1115 if (!value.is(eax)) {
1118 // Drop the stack pointer to the top of the top handler.
1119 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1120 mov(esp, Operand::StaticVariable(handler_address));
1121 // Restore the next handler.
1122 pop(Operand::StaticVariable(handler_address));
1124 // Remove the code object and state, compute the handler address in edi.
1125 pop(edi); // Code object.
1126 pop(edx); // Index and state.
1128 // Restore the context and frame pointer.
1129 pop(esi); // Context.
1130 pop(ebp); // Frame pointer.
1132 // If the handler is a JS frame, restore the context to the frame.
1133 // (kind == ENTRY) == (ebp == 0) == (esi == 0), so we could test either
1137 j(zero, &skip, Label::kNear);
1138 mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
1141 JumpToHandlerEntry();
// ThrowUncatchable: like Throw, but first unwinds past all non-ENTRY
// handlers (the fetch_next/check_kind loop walks the handler chain until
// the KindField is JS_ENTRY), then dispatches to that entry handler with
// context/fp cleared. NOTE(review): the mov into eax, label binds, and the
// esp adjustments around the cleared slots are elided in this excerpt.
1145 void MacroAssembler::ThrowUncatchable(Register value) {
1146 // Adjust this code if not the case.
1147 STATIC_ASSERT(StackHandlerConstants::kSize == 5 * kPointerSize);
1148 STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
1149 STATIC_ASSERT(StackHandlerConstants::kCodeOffset == 1 * kPointerSize);
1150 STATIC_ASSERT(StackHandlerConstants::kStateOffset == 2 * kPointerSize);
1151 STATIC_ASSERT(StackHandlerConstants::kContextOffset == 3 * kPointerSize);
1152 STATIC_ASSERT(StackHandlerConstants::kFPOffset == 4 * kPointerSize);
1154 // The exception is expected in eax.
1155 if (!value.is(eax)) {
1158 // Drop the stack pointer to the top of the top stack handler.
1159 ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
1160 mov(esp, Operand::StaticVariable(handler_address));
1162 // Unwind the handlers until the top ENTRY handler is found.
1163 Label fetch_next, check_kind;
1164 jmp(&check_kind, Label::kNear);
1166 mov(esp, Operand(esp, StackHandlerConstants::kNextOffset));
1169 STATIC_ASSERT(StackHandler::JS_ENTRY == 0);
1170 test(Operand(esp, StackHandlerConstants::kStateOffset),
1171 Immediate(StackHandler::KindField::kMask));
1172 j(not_zero, &fetch_next);
1174 // Set the top handler address to next handler past the top ENTRY handler.
1175 pop(Operand::StaticVariable(handler_address));
1177 // Remove the code object and state, compute the handler address in edi.
1178 pop(edi); // Code object.
1179 pop(edx); // Index and state.
1181 // Clear the context pointer and frame pointer (0 was saved in the handler).
1185 JumpToHandlerEntry();
// Security check for cross-context access through a JSGlobalProxy: compares
// the native context of the currently executing function with the native
// context of |holder_reg|'s proxy, and (on mismatch) compares their security
// tokens. The miss/fail path is on elided lines; comments below cover only
// the visible instructions.
1189 void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
1193 Label same_contexts;
1195 DCHECK(!holder_reg.is(scratch1));
1196 DCHECK(!holder_reg.is(scratch2));
1197 DCHECK(!scratch1.is(scratch2));
1199 // Load current lexical context from the stack frame.
1200 mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));
1202 // When generating debug code, make sure the lexical context is set.
1203 if (emit_debug_code()) {
1204 cmp(scratch1, Immediate(0));
1205 Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
1207 // Load the native context of the current context.
1209 Context::kHeaderSize + Context::GLOBAL_OBJECT_INDEX * kPointerSize;
1210 mov(scratch1, FieldOperand(scratch1, offset));
1211 mov(scratch1, FieldOperand(scratch1, GlobalObject::kNativeContextOffset));
1213 // Check the context is a native context.
1214 if (emit_debug_code()) {
1215 // Read the first word and compare to native_context_map.
1216 cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
1217 isolate()->factory()->native_context_map());
1218 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
1221 // Check if both contexts are the same.
1222 cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1223 j(equal, &same_contexts);
1225 // Compare security tokens, save holder_reg on the stack so we can use it
1226 // as a temporary register.
1228 // Check that the security token in the calling global object is
1229 // compatible with the security token in the receiving global
1232 FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
1234 // Check the context is a native context.
1235 if (emit_debug_code()) {
1236 cmp(scratch2, isolate()->factory()->null_value());
1237 Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);
1239 // Read the first word and compare to native_context_map(),
1240 cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
1241 isolate()->factory()->native_context_map());
1242 Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
// Compare the SECURITY_TOKEN slots of the two native contexts.
1245 int token_offset = Context::kHeaderSize +
1246 Context::SECURITY_TOKEN_INDEX * kPointerSize;
1247 mov(scratch1, FieldOperand(scratch1, token_offset));
1248 cmp(scratch1, FieldOperand(scratch2, token_offset));
1251 bind(&same_contexts);
1255 // Compute the hash code from the untagged key. This must be kept in sync with
1256 // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
1257 // code-stubs-hydrogen.cc
1259 // Note: r0 will contain hash code
// Computes the integer hash of the untagged key in r0, in place. The bit-mix
// steps mirror ComputeIntegerHash in utils.h; some steps' instructions are on
// elided lines of this listing.
1260 void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
1261 // Xor original key with a seed.
1262 if (serializer_enabled()) {
// When serializing, the seed must be read indirectly from the roots array
// (its value is not yet a compile-time constant).
1263 ExternalReference roots_array_start =
1264 ExternalReference::roots_array_start(isolate());
1265 mov(scratch, Immediate(Heap::kHashSeedRootIndex));
1267 Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
// Otherwise the seed is baked in as an immediate.
1271 int32_t seed = isolate()->heap()->HashSeed();
1272 xor_(r0, Immediate(seed));
1275 // hash = ~hash + (hash << 15);
1280 // hash = hash ^ (hash >> 12);
1284 // hash = hash + (hash << 2);
1285 lea(r0, Operand(r0, r0, times_4, 0));
1286 // hash = hash ^ (hash >> 4);
1290 // hash = hash * 2057;
1292 // hash = hash ^ (hash >> 16);
// Probes a SeededNumberDictionary for |key| using an unrolled open-addressing
// probe sequence; jumps to |miss| on failure, falls through with the value in
// |result| on success. The miss jumps inside the probe loop are on elided
// lines of this listing.
1300 void MacroAssembler::LoadFromNumberDictionary(Label* miss,
1309 // elements - holds the slow-case elements of the receiver and is unchanged.
1311 // key - holds the smi key on entry and is unchanged.
1313 // Scratch registers:
1315 // r0 - holds the untagged key on entry and holds the hash once computed.
1317 // r1 - used to hold the capacity mask of the dictionary
1319 // r2 - used for the index into the dictionary.
1321 // result - holds the result on exit if the load succeeds and we fall through.
1325 GetNumberHash(r0, r1);
1327 // Compute capacity mask.
1328 mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
1329 shr(r1, kSmiTagSize); // convert smi to int
1332 // Generate an unrolled loop that performs a few probes before giving up.
1333 for (int i = 0; i < kNumberDictionaryProbes; i++) {
1334 // Use r2 for index calculations and keep the hash intact in r0.
1336 // Compute the masked index: (hash + i + i * i) & mask.
1338 add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
1342 // Scale the index by multiplying by the entry size.
1343 DCHECK(SeededNumberDictionary::kEntrySize == 3);
1344 lea(r2, Operand(r2, r2, times_2, 0)); // r2 = r2 * 3
1346 // Check if the key matches.
1347 cmp(key, FieldOperand(elements,
1350 SeededNumberDictionary::kElementsStartOffset));
1351 if (i != (kNumberDictionaryProbes - 1)) {
1359 // Check that the value is a normal property.
1360 const int kDetailsOffset =
1361 SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
1362 DCHECK_EQ(NORMAL, 0);
1363 test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
1364 Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
1367 // Get the value at the masked, scaled index.
1368 const int kValueOffset =
1369 SeededNumberDictionary::kElementsStartOffset + kPointerSize;
1370 mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
// Loads the current allocation top (for the space selected by |flags|) into
// |result|. If RESULT_CONTAINS_TOP is set, |result| already holds it and, in
// debug builds, is verified against the real top.
1374 void MacroAssembler::LoadAllocationTopHelper(Register result,
1376 AllocationFlags flags) {
1377 ExternalReference allocation_top =
1378 AllocationUtils::GetAllocationTopReference(isolate(), flags);
1380 // Just return if allocation top is already known.
1381 if ((flags & RESULT_CONTAINS_TOP) != 0) {
1382 // No use of scratch if allocation top is provided.
1383 DCHECK(scratch.is(no_reg));
1385 // Assert that result actually contains top on entry.
1386 cmp(result, Operand::StaticVariable(allocation_top));
1387 Check(equal, kUnexpectedAllocationTop);
1392 // Move address of new object to result. Use scratch register if available.
1393 if (scratch.is(no_reg)) {
1394 mov(result, Operand::StaticVariable(allocation_top));
// With a scratch register, keep the top address in scratch so a later
// UpdateAllocationTopHelper can store through it.
1396 mov(scratch, Immediate(allocation_top));
1397 mov(result, Operand(scratch, 0));
// Writes |result_end| back as the new allocation top. If |scratch| is valid
// it is assumed to still hold the top address (as set up by
// LoadAllocationTopHelper).
1402 void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
1404 AllocationFlags flags) {
1405 if (emit_debug_code()) {
// New top must be object-aligned.
1406 test(result_end, Immediate(kObjectAlignmentMask));
1407 Check(zero, kUnalignedAllocationInNewSpace);
1410 ExternalReference allocation_top =
1411 AllocationUtils::GetAllocationTopReference(isolate(), flags);
1413 // Update new top. Use scratch if available.
1414 if (scratch.is(no_reg)) {
1415 mov(Operand::StaticVariable(allocation_top), result_end);
1417 mov(Operand(scratch, 0), result_end);
// Bump-pointer allocation of a fixed-size object. On success |result| holds
// the new object (tagged if TAG_OBJECT is in |flags|); on failure jumps to
// gc_required. Some jumps/labels are on elided lines of this listing.
1422 void MacroAssembler::Allocate(int object_size,
1424 Register result_end,
1427 AllocationFlags flags) {
1428 DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1429 DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
1430 if (!FLAG_inline_new) {
1431 if (emit_debug_code()) {
1432 // Trash the registers to simulate an allocation failure.
1433 mov(result, Immediate(0x7091));
1434 if (result_end.is_valid()) {
1435 mov(result_end, Immediate(0x7191));
1437 if (scratch.is_valid()) {
1438 mov(scratch, Immediate(0x7291));
1444 DCHECK(!result.is(result_end));
1446 // Load address of new object into result.
1447 LoadAllocationTopHelper(result, scratch, flags);
1449 ExternalReference allocation_limit =
1450 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1452 // Align the next allocation. Storing the filler map without checking top is
1453 // safe in new-space because the limit of the heap is aligned there.
1454 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1455 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1456 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
1458 test(result, Immediate(kDoubleAlignmentMask));
1459 j(zero, &aligned, Label::kNear);
1460 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1461 cmp(result, Operand::StaticVariable(allocation_limit));
1462 j(above_equal, gc_required);
// Misaligned: emit a one-word filler and advance to the next double
// boundary before allocating the real object.
1464 mov(Operand(result, 0),
1465 Immediate(isolate()->factory()->one_pointer_filler_map()));
1466 add(result, Immediate(kDoubleSize / 2));
1470 // Calculate new top and bail out if space is exhausted.
1471 Register top_reg = result_end.is_valid() ? result_end : result;
1472 if (!top_reg.is(result)) {
1473 mov(top_reg, result);
1475 add(top_reg, Immediate(object_size));
1476 j(carry, gc_required);
1477 cmp(top_reg, Operand::StaticVariable(allocation_limit));
1478 j(above, gc_required);
1480 // Update allocation top.
1481 UpdateAllocationTopHelper(top_reg, scratch, flags);
1483 // Tag result if requested.
1484 bool tag_result = (flags & TAG_OBJECT) != 0;
1485 if (top_reg.is(result)) {
// top_reg aliases result: subtract the size back out (minus the tag when
// tagging) to recover the object start.
1487 sub(result, Immediate(object_size - kHeapObjectTag));
1489 sub(result, Immediate(object_size));
1491 } else if (tag_result) {
1492 DCHECK(kHeapObjectTag == 1);
// Bump-pointer allocation of a variable-size object: size is
// header_size + element_count << element_size, with element_count either an
// int32 or a smi (handled by shrinking the scale factor by one, since
// kSmiTagSize == 1). Some jumps/labels are on elided lines of this listing.
1498 void MacroAssembler::Allocate(int header_size,
1499 ScaleFactor element_size,
1500 Register element_count,
1501 RegisterValueType element_count_type,
1503 Register result_end,
1506 AllocationFlags flags) {
1507 DCHECK((flags & SIZE_IN_WORDS) == 0);
1508 if (!FLAG_inline_new) {
1509 if (emit_debug_code()) {
1510 // Trash the registers to simulate an allocation failure.
1511 mov(result, Immediate(0x7091));
1512 mov(result_end, Immediate(0x7191));
1513 if (scratch.is_valid()) {
1514 mov(scratch, Immediate(0x7291));
1516 // Register element_count is not modified by the function.
1521 DCHECK(!result.is(result_end));
1523 // Load address of new object into result.
1524 LoadAllocationTopHelper(result, scratch, flags);
1526 ExternalReference allocation_limit =
1527 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1529 // Align the next allocation. Storing the filler map without checking top is
1530 // safe in new-space because the limit of the heap is aligned there.
1531 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1532 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1533 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
1535 test(result, Immediate(kDoubleAlignmentMask));
1536 j(zero, &aligned, Label::kNear);
1537 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1538 cmp(result, Operand::StaticVariable(allocation_limit));
1539 j(above_equal, gc_required);
// Misaligned: emit a one-word filler before the real object.
1541 mov(Operand(result, 0),
1542 Immediate(isolate()->factory()->one_pointer_filler_map()));
1543 add(result, Immediate(kDoubleSize / 2));
1547 // Calculate new top and bail out if space is exhausted.
1548 // We assume that element_count*element_size + header_size does not
1550 if (element_count_type == REGISTER_VALUE_IS_SMI) {
// A smi is the int value shifted left by kSmiTagSize (== 1), so using one
// smaller scale factor yields the same byte count without untagging.
1551 STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
1552 STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
1553 STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
1554 DCHECK(element_size >= times_2);
1555 DCHECK(kSmiTagSize == 1);
1556 element_size = static_cast<ScaleFactor>(element_size - 1);
1558 DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
1560 lea(result_end, Operand(element_count, element_size, header_size));
1561 add(result_end, result);
1562 j(carry, gc_required);
1563 cmp(result_end, Operand::StaticVariable(allocation_limit));
1564 j(above, gc_required);
1566 if ((flags & TAG_OBJECT) != 0) {
1567 DCHECK(kHeapObjectTag == 1);
1571 // Update allocation top.
1572 UpdateAllocationTopHelper(result_end, scratch, flags);
// Bump-pointer allocation with the byte size supplied in a register
// (|object_size|, left unchanged). Some jumps/labels are on elided lines of
// this listing.
1576 void MacroAssembler::Allocate(Register object_size,
1578 Register result_end,
1581 AllocationFlags flags) {
1582 DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
1583 if (!FLAG_inline_new) {
1584 if (emit_debug_code()) {
1585 // Trash the registers to simulate an allocation failure.
1586 mov(result, Immediate(0x7091));
1587 mov(result_end, Immediate(0x7191));
1588 if (scratch.is_valid()) {
1589 mov(scratch, Immediate(0x7291));
1591 // object_size is left unchanged by this function.
1596 DCHECK(!result.is(result_end));
1598 // Load address of new object into result.
1599 LoadAllocationTopHelper(result, scratch, flags);
1601 ExternalReference allocation_limit =
1602 AllocationUtils::GetAllocationLimitReference(isolate(), flags);
1604 // Align the next allocation. Storing the filler map without checking top is
1605 // safe in new-space because the limit of the heap is aligned there.
1606 if ((flags & DOUBLE_ALIGNMENT) != 0) {
1607 DCHECK((flags & PRETENURE_OLD_POINTER_SPACE) == 0);
1608 DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
1610 test(result, Immediate(kDoubleAlignmentMask));
1611 j(zero, &aligned, Label::kNear);
1612 if ((flags & PRETENURE_OLD_DATA_SPACE) != 0) {
1613 cmp(result, Operand::StaticVariable(allocation_limit));
1614 j(above_equal, gc_required);
// Misaligned: emit a one-word filler before the real object.
1616 mov(Operand(result, 0),
1617 Immediate(isolate()->factory()->one_pointer_filler_map()));
1618 add(result, Immediate(kDoubleSize / 2));
1622 // Calculate new top and bail out if space is exhausted.
1623 if (!object_size.is(result_end)) {
1624 mov(result_end, object_size);
1626 add(result_end, result);
1627 j(carry, gc_required);
1628 cmp(result_end, Operand::StaticVariable(allocation_limit));
1629 j(above, gc_required);
1631 // Tag result if requested.
1632 if ((flags & TAG_OBJECT) != 0) {
1633 DCHECK(kHeapObjectTag == 1);
1637 // Update allocation top.
1638 UpdateAllocationTopHelper(result_end, scratch, flags);
// Rolls back the most recent new-space allocation by resetting the allocation
// top to |object| (untagged first). Only valid if |object| was the last
// object allocated and nothing else has been allocated since.
1642 void MacroAssembler::UndoAllocationInNewSpace(Register object) {
1643 ExternalReference new_space_allocation_top =
1644 ExternalReference::new_space_allocation_top_address(isolate());
1646 // Make sure the object has no tag before resetting top.
1647 and_(object, Immediate(~kHeapObjectTagMask));
// The object must lie below the current top, i.e. inside already-allocated
// new space; otherwise this would "undo" memory that was never allocated.
1649 cmp(object, Operand::StaticVariable(new_space_allocation_top));
1650 Check(below, kUndoAllocationOfNonAllocatedMemory);
1652 mov(Operand::StaticVariable(new_space_allocation_top), object);
// Allocates a HeapNumber in new space and installs the (mutable or immutable,
// per |mode|) heap-number map. The value field is left uninitialized.
1656 void MacroAssembler::AllocateHeapNumber(Register result,
1661 // Allocate heap number in new space.
1662 Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
1665 Handle<Map> map = mode == MUTABLE
1666 ? isolate()->factory()->mutable_heap_number_map()
1667 : isolate()->factory()->heap_number_map();
1670 mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
// X-macro listing the SIMD128 types for which an Allocate##Type helper is
// generated below: (class name, lowercase name, UPPER name).
1674 #define SIMD128_HEAP_ALLOCATE_FUNCTIONS(V) \
1675 V(Float32x4, float32x4, FLOAT32x4) \
1676 V(Float64x2, float64x2, FLOAT64x2) \
1677 V(Int32x4, int32x4, INT32x4)
// Generates MacroAssembler::Allocate<Type>: allocates the SIMD128 wrapper
// object, installs its map from the native context's TYPE##_FUNCTION, then
// allocates a one-element FixedTypedArray backing store and links it into the
// wrapper's value field. (No new comment lines are inserted inside the macro
// body because every body line must end in a continuation backslash.)
1679 #define DECLARE_SIMD_HEAP_ALLOCATE_FUNCTION(Type, type, TYPE) \
1680 void MacroAssembler::Allocate##Type(Register result, \
1681 Register scratch1, \
1682 Register scratch2, \
1683 Label* gc_required) { \
1684 /* Allocate SIMD128 object */ \
1685 Allocate(Type::kSize, result, scratch1, no_reg, gc_required, TAG_OBJECT);\
1686 /* Load the initial map and assign to new allocated object. */ \
1687 mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset)); \
1690 Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX))); \
1692 FieldOperand(scratch1, GlobalObject::kNativeContextOffset)); \
1695 Context::SlotOffset(Context::TYPE##_FUNCTION_INDEX))); \
1696 LoadGlobalFunctionInitialMap(scratch1, scratch1); \
1697 mov(FieldOperand(result, JSObject::kMapOffset), scratch1); \
1698 /* Initialize properties and elements. */ \
1699 mov(FieldOperand(result, JSObject::kPropertiesOffset), \
1700 Immediate(isolate()->factory()->empty_fixed_array())); \
1701 mov(FieldOperand(result, JSObject::kElementsOffset), \
1702 Immediate(isolate()->factory()->empty_fixed_array())); \
1703 /* Allocate FixedTypedArray object */ \
1704 Allocate(FixedTypedArrayBase::kDataOffset + k##Type##Size, \
1705 scratch1, scratch2, no_reg, gc_required, TAG_OBJECT); \
1707 mov(FieldOperand(scratch1, FixedTypedArrayBase::kMapOffset), \
1708 Immediate(isolate()->factory()->fixed_##type##_array_map())); \
1709 mov(scratch2, Immediate(1)); \
1711 mov(FieldOperand(scratch1, FixedTypedArrayBase::kLengthOffset), \
1713 /* Assign FixedTypedArray object to SIMD128 object */ \
1714 mov(FieldOperand(result, Type::kValueOffset), scratch1); \
// Instantiate Allocate<Type> for every type in the list above.
1717 SIMD128_HEAP_ALLOCATE_FUNCTIONS(DECLARE_SIMD_HEAP_ALLOCATE_FUNCTION)
// Allocates a SeqTwoByteString of |length| characters (length passed in a
// register), rounding the character payload up to object alignment, and sets
// map, length (as a smi) and hash field.
1720 void MacroAssembler::AllocateTwoByteString(Register result,
1725 Label* gc_required) {
1726 // Calculate the number of bytes needed for the characters in the string while
1727 // observing object alignment.
1728 DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1729 DCHECK(kShortSize == 2);
1730 // scratch1 = length * 2 + kObjectAlignmentMask.
1731 lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
1732 and_(scratch1, Immediate(~kObjectAlignmentMask));
1734 // Allocate two byte string in new space.
1735 Allocate(SeqTwoByteString::kHeaderSize,
1738 REGISTER_VALUE_IS_INT32,
1745 // Set the map, length and hash field.
1746 mov(FieldOperand(result, HeapObject::kMapOffset),
1747 Immediate(isolate()->factory()->string_map()));
1748 mov(scratch1, length);
// (Smi-tagging of the length happens on an elided line before this store.)
1750 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1751 mov(FieldOperand(result, String::kHashFieldOffset),
1752 Immediate(String::kEmptyHashField));
// Allocates a SeqOneByteString of |length| characters (length passed in a
// register), rounding the payload up to object alignment, and sets map,
// length (as a smi) and hash field.
1756 void MacroAssembler::AllocateOneByteString(Register result, Register length,
1757 Register scratch1, Register scratch2,
1759 Label* gc_required) {
1760 // Calculate the number of bytes needed for the characters in the string while
1761 // observing object alignment.
1762 DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
1763 mov(scratch1, length);
1764 DCHECK(kCharSize == 1);
1765 add(scratch1, Immediate(kObjectAlignmentMask));
1766 and_(scratch1, Immediate(~kObjectAlignmentMask));
1768 // Allocate one-byte string in new space.
1769 Allocate(SeqOneByteString::kHeaderSize,
1772 REGISTER_VALUE_IS_INT32,
1779 // Set the map, length and hash field.
1780 mov(FieldOperand(result, HeapObject::kMapOffset),
1781 Immediate(isolate()->factory()->one_byte_string_map()));
1782 mov(scratch1, length);
// (Smi-tagging of the length happens on an elided line before this store.)
1784 mov(FieldOperand(result, String::kLengthOffset), scratch1);
1785 mov(FieldOperand(result, String::kHashFieldOffset),
1786 Immediate(String::kEmptyHashField));
// Allocates a SeqOneByteString whose length is a compile-time constant; the
// size computation is done statically via SizeFor(length).
1790 void MacroAssembler::AllocateOneByteString(Register result, int length,
1791 Register scratch1, Register scratch2,
1792 Label* gc_required) {
1795 // Allocate one-byte string in new space.
1796 Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
1797 gc_required, TAG_OBJECT);
1799 // Set the map, length and hash field.
1800 mov(FieldOperand(result, HeapObject::kMapOffset),
1801 Immediate(isolate()->factory()->one_byte_string_map()));
1802 mov(FieldOperand(result, String::kLengthOffset),
1803 Immediate(Smi::FromInt(length)));
1804 mov(FieldOperand(result, String::kHashFieldOffset),
1805 Immediate(String::kEmptyHashField));
// Allocates a (two-byte) ConsString in new space and sets its map; first,
// second, length and hash fields are left for the caller to fill in.
1809 void MacroAssembler::AllocateTwoByteConsString(Register result,
1812 Label* gc_required) {
1813 // Allocate cons string object in new space.
1814 Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
1817 // Set the map. The other fields are left uninitialized.
1818 mov(FieldOperand(result, HeapObject::kMapOffset),
1819 Immediate(isolate()->factory()->cons_string_map()));
// Allocates a one-byte ConsString in new space and sets its map; the other
// fields are left for the caller to fill in.
1823 void MacroAssembler::AllocateOneByteConsString(Register result,
1826 Label* gc_required) {
1827 Allocate(ConsString::kSize,
1834 // Set the map. The other fields are left uninitialized.
1835 mov(FieldOperand(result, HeapObject::kMapOffset),
1836 Immediate(isolate()->factory()->cons_one_byte_string_map()));
// Allocates a (two-byte) SlicedString in new space and sets its map; parent,
// offset, length and hash fields are left for the caller to fill in.
1840 void MacroAssembler::AllocateTwoByteSlicedString(Register result,
1843 Label* gc_required) {
1844 // Allocate sliced string object in new space.
1845 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1848 // Set the map. The other fields are left uninitialized.
1849 mov(FieldOperand(result, HeapObject::kMapOffset),
1850 Immediate(isolate()->factory()->sliced_string_map()));
// Allocates a one-byte SlicedString in new space and sets its map; the other
// fields are left for the caller to fill in.
1854 void MacroAssembler::AllocateOneByteSlicedString(Register result,
1857 Label* gc_required) {
1858 // Allocate sliced string object in new space.
1859 Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
1862 // Set the map. The other fields are left uninitialized.
1863 mov(FieldOperand(result, HeapObject::kMapOffset),
1864 Immediate(isolate()->factory()->sliced_one_byte_string_map()));
1868 // Copy memory, byte-by-byte, from source to destination. Not optimized for
1869 // long or aligned copies. The contents of scratch and length are destroyed.
1870 // Source and destination are incremented by length.
1871 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1872 // have been tried here already, and this is fastest.
1873 // A simpler loop is faster on small copies, but 30% slower on large ones.
1874 // The cld() instruction must have been emitted, to set the direction flag(),
1875 // before calling this function.
// NOTE(review): the rep_movs path, label binds and short-loop increments are
// on elided lines of this listing; comments below cover only the visible
// instructions.
1876 void MacroAssembler::CopyBytes(Register source,
1877 Register destination,
1880 Label short_loop, len4, len8, len12, done, short_string;
// Fixed register assignment is required by the string-move instructions:
// esi = source, edi = destination, ecx = count.
1881 DCHECK(source.is(esi));
1882 DCHECK(destination.is(edi));
1883 DCHECK(length.is(ecx));
1884 cmp(length, Immediate(4));
1885 j(below, &short_string, Label::kNear);
1887 // Because source is 4-byte aligned in our uses of this function,
1888 // we keep source aligned for the rep_movs call by copying the odd bytes
1889 // at the end of the ranges.
1890 mov(scratch, Operand(source, length, times_1, -4));
1891 mov(Operand(destination, length, times_1, -4), scratch);
1893 cmp(length, Immediate(8));
1894 j(below_equal, &len4, Label::kNear);
1895 cmp(length, Immediate(12));
1896 j(below_equal, &len8, Label::kNear);
1897 cmp(length, Immediate(16));
1898 j(below_equal, &len12, Label::kNear);
// After the word copy, advance destination past the 1-3 remaining odd bytes.
1903 and_(scratch, Immediate(0x3));
1904 add(destination, scratch);
1905 jmp(&done, Label::kNear);
// Unrolled 4/8/12-byte copies fall through each other (binds elided).
1908 mov(scratch, Operand(source, 8));
1909 mov(Operand(destination, 8), scratch);
1911 mov(scratch, Operand(source, 4));
1912 mov(Operand(destination, 4), scratch);
1914 mov(scratch, Operand(source, 0));
1915 mov(Operand(destination, 0), scratch);
1916 add(destination, length);
1917 jmp(&done, Label::kNear);
1919 bind(&short_string);
1920 test(length, length);
1921 j(zero, &done, Label::kNear);
// Byte-at-a-time loop for lengths 1-3 (increments/decrement are elided).
1924 mov_b(scratch, Operand(source, 0));
1925 mov_b(Operand(destination, 0), scratch);
1929 j(not_zero, &short_loop);
// Stores |filler| into every pointer-size slot in [start_offset, end_offset).
// start_offset is advanced to end_offset; the loop labels are on elided lines
// of this listing.
1935 void MacroAssembler::InitializeFieldsWithFiller(Register start_offset,
1936 Register end_offset,
1941 mov(Operand(start_offset, 0), filler);
1942 add(start_offset, Immediate(kPointerSize));
1944 cmp(start_offset, end_offset);
// Tests one bit of a smi field at |field_offset| inside |object|, setting the
// zero flag for the caller to branch on. The bit index is adjusted for the
// smi tag so it addresses the raw in-memory word.
1949 void MacroAssembler::BooleanBitTest(Register object,
1952 bit_index += kSmiTagSize + kSmiShiftSize;
1953 DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
// Address the single byte containing the bit, then test the bit within it.
1954 int byte_index = bit_index / kBitsPerByte;
1955 int byte_bit_index = bit_index & (kBitsPerByte - 1);
1956 test_b(FieldOperand(object, field_offset + byte_index),
1957 static_cast<byte>(1 << byte_bit_index));
// Jumps to |then_label| if |result| could be the floating-point value -0:
// i.e. the integer result is zero and the sign information (checked on an
// elided line) indicates a negative operand.
1962 void MacroAssembler::NegativeZeroTest(Register result,
1964 Label* then_label) {
1966 test(result, result);
1969 j(sign, then_label);
// Overload of the -0 check taking two operand registers (signature partially
// elided in this listing); same idea: a zero result with a negative sign bit
// means the value is -0, so jump to |then_label|.
1974 void MacroAssembler::NegativeZeroTest(Register result,
1978 Label* then_label) {
1980 test(result, result);
1984 j(sign, then_label);
// Loads |function|'s prototype into |result|, handling the cases where the
// function has an initial map (prototype is map->prototype) or a
// non-instance prototype (fetched from the constructor). Jumps to |miss| for
// smis, non-functions, bound functions (when requested) and the-hole
// prototypes. Several binds/jumps are on elided lines of this listing.
1989 void MacroAssembler::TryGetFunctionPrototype(Register function,
1993 bool miss_on_bound_function) {
1995 if (miss_on_bound_function) {
1996 // Check that the receiver isn't a smi.
1997 JumpIfSmi(function, miss);
1999 // Check that the function really is a function.
2000 CmpObjectType(function, JS_FUNCTION_TYPE, result);
2003 // If a bound function, go to miss label.
2005 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2006 BooleanBitTest(scratch, SharedFunctionInfo::kCompilerHintsOffset,
2007 SharedFunctionInfo::kBoundFunction);
2010 // Make sure that the function has an instance prototype.
2011 movzx_b(scratch, FieldOperand(result, Map::kBitFieldOffset));
2012 test(scratch, Immediate(1 << Map::kHasNonInstancePrototype));
2013 j(not_zero, &non_instance);
2016 // Get the prototype or initial map from the function.
2018 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2020 // If the prototype or initial map is the hole, don't return it and
2021 // simply miss the cache instead. This will allow us to allocate a
2022 // prototype object on-demand in the runtime system.
2023 cmp(result, Immediate(isolate()->factory()->the_hole_value()));
2026 // If the function does not have an initial map, we're done.
2028 CmpObjectType(result, MAP_TYPE, scratch);
2029 j(not_equal, &done);
2031 // Get the prototype from the initial map.
2032 mov(result, FieldOperand(result, Map::kPrototypeOffset));
2034 if (miss_on_bound_function) {
2037 // Non-instance prototype: Fetch prototype from constructor field
2039 bind(&non_instance);
2040 mov(result, FieldOperand(result, Map::kConstructorOffset));
// Emits a call to the given code stub, recording |ast_id| for type feedback.
2048 void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
2049 DCHECK(AllowThisStubCall(stub)); // Calls are not allowed in some stubs.
2050 call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
// Tail-calls (jumps to) the given code stub; no return to the caller.
2054 void MacroAssembler::TailCallStub(CodeStub* stub) {
2055 jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
// Returns from a stub, popping argc - 1 stack arguments (the receiver/first
// argument is accounted for by the ret itself).
2059 void MacroAssembler::StubReturn(int argc) {
2060 DCHECK(argc >= 1 && generating_stub());
2061 ret((argc - 1) * kPointerSize);
// A stub may be called either when a frame has been set up, or when the stub
// never sets up a frame of its own.
2065 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
2066 return has_frame_ || !stub->SometimesSetsUpAFrame();
// Extracts the cached array index from a string hash field into |index| as a
// smi (via DecodeFieldToSmi on ArrayIndexValueBits).
2070 void MacroAssembler::IndexFromHash(Register hash, Register index) {
2071 // The assert checks that the constants for the maximum number of digits
2072 // for an array index cached in the hash field and the number of bits
2073 // reserved for it does not conflict.
2074 DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
2075 (1 << String::kArrayIndexValueBits));
2076 if (!index.is(hash)) {
2079 DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
// Calls a runtime function through CEntryStub: eax = argument count,
// ebx = entry point, then the stub transfers control to the C++ runtime.
2083 void MacroAssembler::CallRuntime(const Runtime::Function* f,
2085 SaveFPRegsMode save_doubles) {
2086 // If the expected number of arguments of the runtime function is
2087 // constant, we check that the actual number of arguments match the
2089 CHECK(f->nargs < 0 || f->nargs == num_arguments);
2091 // TODO(1236192): Most runtime routines don't need the number of
2092 // arguments passed in because it is constant. At some point we
2093 // should remove this need and make the runtime routine entry code
2095 Move(eax, Immediate(num_arguments));
2096 mov(ebx, Immediate(ExternalReference(f, isolate())));
2097 CEntryStub ces(isolate(), 1, save_doubles);
// Calls an arbitrary external (C++) entry point via CEntryStub, with the
// argument count in eax and the target address in ebx.
2102 void MacroAssembler::CallExternalReference(ExternalReference ref,
2103 int num_arguments) {
2104 mov(eax, Immediate(num_arguments));
2105 mov(ebx, Immediate(ref));
2107 CEntryStub stub(isolate(), 1);
// Tail-calls an external entry point: loads the argument count into eax and
// jumps (does not call) through CEntryStub via JumpToExternalReference.
2112 void MacroAssembler::TailCallExternalReference(const ExternalReference& ext,
2115 // TODO(1236192): Most runtime routines don't need the number of
2116 // arguments passed in because it is constant. At some point we
2117 // should remove this need and make the runtime routine entry code
2119 Move(eax, Immediate(num_arguments));
2120 JumpToExternalReference(ext);
// Convenience wrapper: tail-calls the runtime function identified by |fid|.
2124 void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
2127 TailCallExternalReference(ExternalReference(fid, isolate()),
// Returns the stack operand for the index-th outgoing API-call parameter
// (slots laid out by PrepareCallApiFunction / EnterApiExitFrame).
2133 Operand ApiParameterOperand(int index) {
2134 return Operand(esp, index * kPointerSize);
// Sets up an API exit frame with |argc| parameter slots. In debug builds esi
// is zapped since the context is not valid across the API call.
2138 void MacroAssembler::PrepareCallApiFunction(int argc) {
2139 EnterApiExitFrame(argc);
2140 if (emit_debug_code()) {
2141 mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
// Calls an API function (directly, or through the profiler thunk |thunk_ref|
// when profiling is enabled), manages the HandleScope around the call, checks
// for scheduled exceptions, optionally validates the returned value, restores
// the context if requested, leaves the exit frame and returns, popping
// |stack_space| slots. Several label binds and pushes/calls are on elided
// lines of this listing.
2146 void MacroAssembler::CallApiFunctionAndReturn(
2147 Register function_address,
2148 ExternalReference thunk_ref,
2149 Operand thunk_last_arg,
2151 Operand return_value_operand,
2152 Operand* context_restore_operand) {
2153 ExternalReference next_address =
2154 ExternalReference::handle_scope_next_address(isolate());
2155 ExternalReference limit_address =
2156 ExternalReference::handle_scope_limit_address(isolate());
2157 ExternalReference level_address =
2158 ExternalReference::handle_scope_level_address(isolate());
2160 DCHECK(edx.is(function_address));
2161 // Allocate HandleScope in callee-save registers.
2162 mov(ebx, Operand::StaticVariable(next_address));
2163 mov(edi, Operand::StaticVariable(limit_address));
2164 add(Operand::StaticVariable(level_address), Immediate(1));
// Optionally log entry into external code for the timer-event profiler.
2166 if (FLAG_log_timer_events) {
2167 FrameScope frame(this, StackFrame::MANUAL);
2168 PushSafepointRegisters();
2169 PrepareCallCFunction(1, eax);
2170 mov(Operand(esp, 0),
2171 Immediate(ExternalReference::isolate_address(isolate())));
2172 CallCFunction(ExternalReference::log_enter_external_function(isolate()), 1);
2173 PopSafepointRegisters();
// If the CPU profiler is active, route the call through the thunk so the
// profiler can record it; otherwise call the function directly.
2177 Label profiler_disabled;
2178 Label end_profiler_check;
2179 mov(eax, Immediate(ExternalReference::is_profiling_address(isolate())));
2180 cmpb(Operand(eax, 0), 0);
2181 j(zero, &profiler_disabled);
2183 // Additional parameter is the address of the actual getter function.
2184 mov(thunk_last_arg, function_address);
2185 // Call the api function.
2186 mov(eax, Immediate(thunk_ref));
2188 jmp(&end_profiler_check);
2190 bind(&profiler_disabled);
2191 // Call the api function.
2192 call(function_address);
2193 bind(&end_profiler_check);
// Optionally log exit from external code.
2195 if (FLAG_log_timer_events) {
2196 FrameScope frame(this, StackFrame::MANUAL);
2197 PushSafepointRegisters();
2198 PrepareCallCFunction(1, eax);
2199 mov(Operand(esp, 0),
2200 Immediate(ExternalReference::isolate_address(isolate())));
2201 CallCFunction(ExternalReference::log_leave_external_function(isolate()), 1);
2202 PopSafepointRegisters();
2206 // Load the value from ReturnValue
2207 mov(eax, return_value_operand)<span/>;
2209 Label promote_scheduled_exception;
2210 Label exception_handled;
2211 Label delete_allocated_handles;
2212 Label leave_exit_frame;
2215 // No more valid handles (the result handle was the last one). Restore
2216 // previous handle scope.
2217 mov(Operand::StaticVariable(next_address), ebx);
2218 sub(Operand::StaticVariable(level_address), Immediate(1));
2219 Assert(above_equal, kInvalidHandleScopeLevel);
// If the limit moved, the callee allocated handle-scope extensions that
// must be deleted before leaving.
2220 cmp(edi, Operand::StaticVariable(limit_address));
2221 j(not_equal, &delete_allocated_handles);
2222 bind(&leave_exit_frame);
2224 // Check if the function scheduled an exception.
2225 ExternalReference scheduled_exception_address =
2226 ExternalReference::scheduled_exception_address(isolate());
2227 cmp(Operand::StaticVariable(scheduled_exception_address),
2228 Immediate(isolate()->factory()->the_hole_value()));
2229 j(not_equal, &promote_scheduled_exception);
2230 bind(&exception_handled);
2232 #if ENABLE_EXTRA_CHECKS
2233 // Check if the function returned a valid JavaScript value.
2235 Register return_value = eax;
2238 JumpIfSmi(return_value, &ok, Label::kNear);
2239 mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
2241 CmpInstanceType(map, FIRST_NONSTRING_TYPE);
2242 j(below, &ok, Label::kNear);
2244 CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
2245 j(above_equal, &ok, Label::kNear);
2247 cmp(map, isolate()->factory()->heap_number_map());
2248 j(equal, &ok, Label::kNear);
2250 cmp(return_value, isolate()->factory()->undefined_value());
2251 j(equal, &ok, Label::kNear);
2253 cmp(return_value, isolate()->factory()->true_value());
2254 j(equal, &ok, Label::kNear);
2256 cmp(return_value, isolate()->factory()->false_value());
2257 j(equal, &ok, Label::kNear);
2259 cmp(return_value, isolate()->factory()->null_value());
2260 j(equal, &ok, Label::kNear);
2262 Abort(kAPICallReturnedInvalidObject);
2267 bool restore_context = context_restore_operand != NULL;
2268 if (restore_context) {
2269 mov(esi, *context_restore_operand);
2271 LeaveApiExitFrame(!restore_context);
2272 ret(stack_space * kPointerSize);
2274 bind(&promote_scheduled_exception);
2276 FrameScope frame(this, StackFrame::INTERNAL);
2277 CallRuntime(Runtime::kPromoteScheduledException, 0);
2279 jmp(&exception_handled);
2281 // HandleScope limit has changed. Delete allocated extensions.
2282 ExternalReference delete_extensions =
2283 ExternalReference::delete_handle_scope_extensions(isolate());
2284 bind(&delete_allocated_handles);
2285 mov(Operand::StaticVariable(limit_address), edi);
2287 mov(Operand(esp, 0),
2288 Immediate(ExternalReference::isolate_address(isolate())));
2289 mov(eax, Immediate(delete_extensions));
2292 jmp(&leave_exit_frame);
// Tail-calls into the runtime: loads the external entry point into ebx (the
// register the CEntry stub expects) and jumps to the 1-argument CEntry stub.
2296 void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
2297 // Set the entry point and jump to the C entry runtime stub.
2298 mov(ebx, Immediate(ext));
2299 CEntryStub ces(isolate(), 1);
2300 jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
// Emits the argument-count check preceding a JS function invocation.
// Compares |expected| against |actual| (each either an immediate or a
// register; registers are pinned to ebx/eax by the adaptor's calling
// convention).  When the counts are statically equal, falls through with no
// code.  When a mismatch is possible, emits a call/jump to the
// ArgumentsAdaptorTrampoline; *definitely_mismatches is set when adaption is
// statically known to be required, in which case control never reaches
// |done|.  NOTE(review): this listing has gaps (missing else branches and
// labels); comments describe only the visible lines.
2304 void MacroAssembler::InvokePrologue(const ParameterCount& expected,
2305                                     const ParameterCount& actual,
2306                                     Handle<Code> code_constant,
2307                                     const Operand& code_operand,
2309                                     bool* definitely_mismatches,
2311                                     Label::Distance done_near,
2312                                     const CallWrapper& call_wrapper) {
2313   bool definitely_matches = false;
2314   *definitely_mismatches = false;
2316   if (expected.is_immediate()) {
2317     DCHECK(actual.is_immediate());
2318     if (expected.immediate() == actual.immediate()) {
2319       definitely_matches = true;
2321       mov(eax, actual.immediate());
2322       const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
2323       if (expected.immediate() == sentinel) {
2324         // Don't worry about adapting arguments for builtins that
2325         // don't want that done. Skip adaption code by making it look
2326         // like we have a match between expected and actual number of
2328         definitely_matches = true;
2330         *definitely_mismatches = true;
2331         mov(ebx, expected.immediate());
2335     if (actual.is_immediate()) {
2336       // Expected is in register, actual is immediate. This is the
2337       // case when we invoke function values without going through the
2339       cmp(expected.reg(), actual.immediate());
2341       DCHECK(expected.reg().is(ebx));
2342       mov(eax, actual.immediate());
2343     } else if (!expected.reg().is(actual.reg())) {
2344       // Both expected and actual are in (different) registers. This
2345       // is the case when we invoke functions using call and apply.
2346       cmp(expected.reg(), actual.reg());
2348       DCHECK(actual.reg().is(eax));
2349       DCHECK(expected.reg().is(ebx));
2353   if (!definitely_matches) {
2354     Handle<Code> adaptor =
2355         isolate()->builtins()->ArgumentsAdaptorTrampoline();
2356     if (!code_constant.is_null()) {
      // The adaptor expects the code entry in edx; materialize it from the
      // constant (skipping the Code header) or from |code_operand|.
2357       mov(edx, Immediate(code_constant));
2358       add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
2359     } else if (!code_operand.is_reg(edx)) {
2360       mov(edx, code_operand);
2363     if (flag == CALL_FUNCTION) {
2364       call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
2365       call(adaptor, RelocInfo::CODE_TARGET);
2366       call_wrapper.AfterCall();
2367       if (!*definitely_mismatches) {
2368         jmp(done, done_near);
2371       jmp(adaptor, RelocInfo::CODE_TARGET);
// Invokes the code at |code| with an argument-count check.  Emits the
// prologue first; when no definite mismatch was detected, performs a call or
// jump according to |flag|, wrapping calls with |call_wrapper|.
2378 void MacroAssembler::InvokeCode(const Operand& code,
2379                                 const ParameterCount& expected,
2380                                 const ParameterCount& actual,
2382                                 const CallWrapper& call_wrapper) {
2383   // You can't call a function without a valid frame.
2384   DCHECK(flag == JUMP_FUNCTION || has_frame());
2387   bool definitely_mismatches = false;
2388   InvokePrologue(expected, actual, Handle<Code>::null(), code,
2389                  &done, &definitely_mismatches, flag, Label::kNear,
2391   if (!definitely_mismatches) {
2392     if (flag == CALL_FUNCTION) {
2393       call_wrapper.BeforeCall(CallSize(code));
2395       call_wrapper.AfterCall();
2397       DCHECK(flag == JUMP_FUNCTION);
// Invokes the JSFunction in edi, reading the expected argument count from
// its SharedFunctionInfo.  Clobbers edx (shared info), esi (function
// context) and ebx (formal parameter count, still a smi at this point --
// presumably untagged on a missing line; confirm against full source).
2405 void MacroAssembler::InvokeFunction(Register fun,
2406                                     const ParameterCount& actual,
2408                                     const CallWrapper& call_wrapper) {
2409   // You can't call a function without a valid frame.
2410   DCHECK(flag == JUMP_FUNCTION || has_frame());
2412   DCHECK(fun.is(edi));
2413   mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
2414   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2415   mov(ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
2418   ParameterCount expected(ebx);
2419   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2420              expected, actual, flag, call_wrapper);
// Invokes the JSFunction in edi with a caller-supplied expected argument
// count.  Loads the function's context into esi before dispatching.
2424 void MacroAssembler::InvokeFunction(Register fun,
2425                                     const ParameterCount& expected,
2426                                     const ParameterCount& actual,
2428                                     const CallWrapper& call_wrapper) {
2429   // You can't call a function without a valid frame.
2430   DCHECK(flag == JUMP_FUNCTION || has_frame());
2432   DCHECK(fun.is(edi));
2433   mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
2435   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2436              expected, actual, flag, call_wrapper);
// Handle-based convenience overload: materializes |function| into edi
// (via a cell when it lives in new space) and delegates to the register
// overload above.
2440 void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
2441                                     const ParameterCount& expected,
2442                                     const ParameterCount& actual,
2444                                     const CallWrapper& call_wrapper) {
2445   LoadHeapObject(edi, function);
2446   InvokeFunction(edi, expected, actual, flag, call_wrapper);
// Invokes a JavaScript builtin.  Passes expected == actual == 0 so the
// prologue sees a definite match and emits no argument-adaption code;
// builtins validate their own argument counts.
2450 void MacroAssembler::InvokeBuiltin(Builtins::JavaScript id,
2452                                    const CallWrapper& call_wrapper) {
2453   // You can't call a builtin without a valid frame.
2454   DCHECK(flag == JUMP_FUNCTION || has_frame());
2456   // Rely on the assertion to check that the number of provided
2457   // arguments match the expected number of arguments. Fake a
2458   // parameter count to avoid emitting code to do the check.
2459   ParameterCount expected(0);
2460   GetBuiltinFunction(edi, id);
2461   InvokeCode(FieldOperand(edi, JSFunction::kCodeEntryOffset),
2462              expected, expected, flag, call_wrapper);
// Loads the JSFunction for builtin |id| into |target|:
// current context -> global object -> builtins object -> function slot.
2466 void MacroAssembler::GetBuiltinFunction(Register target,
2467                                         Builtins::JavaScript id) {
2468   // Load the JavaScript builtin function from the builtins object.
2469   mov(target, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2470   mov(target, FieldOperand(target, GlobalObject::kBuiltinsOffset));
2471   mov(target, FieldOperand(target,
2472                            JSBuiltinsObject::OffsetOfFunctionWithId(id)));
// Loads the code entry point of builtin |id| into |target|.  Clobbers edi
// (holds the builtin function), hence the alias check.
2476 void MacroAssembler::GetBuiltinEntry(Register target, Builtins::JavaScript id) {
2477   DCHECK(!target.is(edi));
2478   // Load the JavaScript builtin function from the builtins object.
2479   GetBuiltinFunction(edi, id);
2480   // Load the code entry point from the function into the target register.
2481   mov(target, FieldOperand(edi, JSFunction::kCodeEntryOffset));
// Walks |context_chain_length| links up the context chain starting from esi
// and leaves the resulting context in |dst|.  With a length of 0 the
// current context is used (the copy into dst happens on a line missing from
// this listing).  In debug code, verifies the walk did not land on a
// with-context, which would mean static and runtime scope chains disagree.
2485 void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
2486   if (context_chain_length > 0) {
2487     // Move up the chain of contexts to the context containing the slot.
2488     mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2489     for (int i = 1; i < context_chain_length; i++) {
2490       mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
2493     // Slot is in the current function context. Move it into the
2494     // destination register in case we store into it (the write barrier
2495     // cannot be allowed to destroy the context in esi).
2499   // We should not have found a with context by walking the context chain
2500   // (i.e., the static scope chain and runtime context chain do not agree).
2501   // A variable occurring in such a scope should have slot type LOOKUP and
2503   if (emit_debug_code()) {
2504     cmp(FieldOperand(dst, HeapObject::kMapOffset),
2505         isolate()->factory()->with_context_map());
2506     Check(not_equal, kVariableResolvedToWithContext);
// If |map_in_out| equals the native context's cached JSArray map for
// |expected_kind|, replaces it with the cached map for |transitioned_kind|;
// otherwise jumps to |no_map_match|.  Clobbers |scratch|.
2511 void MacroAssembler::LoadTransitionedArrayMapConditional(
2512     ElementsKind expected_kind,
2513     ElementsKind transitioned_kind,
2514     Register map_in_out,
2516     Label* no_map_match) {
2517   // Load the global or builtins object from the current context.
2518   mov(scratch, Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2519   mov(scratch, FieldOperand(scratch, GlobalObject::kNativeContextOffset));
2521   // Check that the function's map is the same as the expected cached map.
2522   mov(scratch, Operand(scratch,
2523                        Context::SlotOffset(Context::JS_ARRAY_MAPS_INDEX)));
  // The maps array is indexed by ElementsKind, one pointer per kind.
2525   size_t offset = expected_kind * kPointerSize +
2526       FixedArrayBase::kHeaderSize;
2527   cmp(map_in_out, FieldOperand(scratch, offset));
2528   j(not_equal, no_map_match);
2530   // Use the transitioned cached map.
2531   offset = transitioned_kind * kPointerSize +
2532       FixedArrayBase::kHeaderSize;
2533   mov(map_in_out, FieldOperand(scratch, offset));
// Loads the global function at native-context slot |index| into |function|.
// Chain: current context -> global object -> native context -> slot.
// (The destination operands of the first two movs fall on lines missing
// from this listing.)
2537 void MacroAssembler::LoadGlobalFunction(int index, Register function) {
2538   // Load the global or builtins object from the current context.
2540       Operand(esi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
2541   // Load the native context from the global or builtins object.
2543       FieldOperand(function, GlobalObject::kNativeContextOffset));
2544   // Load the function from the native context.
2545   mov(function, Operand(function, Context::SlotOffset(index)));
// Loads |function|'s initial map into |map|.  Global functions always have
// an initial map, so debug code aborts if the slot does not hold a map
// (i.e. the meta-map check fails).
2549 void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
2551   // Load the initial map. The global functions all have initial maps.
2552   mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2553   if (emit_debug_code()) {
2555     CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
2558     Abort(kGlobalFunctionsMustHaveInitialMap);
2564 // Store the value in register src in the safepoint register stack
2565 // slot for register dst.
2566 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
2567   mov(SafepointRegisterSlot(dst), src);
// Immediate variant: stores |src| into dst's safepoint register stack slot.
2571 void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
2572   mov(SafepointRegisterSlot(dst), src);
// Loads |dst| from the safepoint register stack slot belonging to |src|.
2576 void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
2577   mov(dst, SafepointRegisterSlot(src));
// Returns the esp-relative operand of |reg|'s slot in the pushed
// safepoint-register area.
2581 Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
2582   return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
// Maps a register encoding to its index in the safepoint-register area.
2586 int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
2587   // The registers are pushed starting with the lowest encoding,
2588   // which means that lowest encodings are furthest away from
2589   // the stack pointer.
2590   DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
2591   return kNumSafepointRegisters - reg_code - 1;
// Loads |object| into |result|.  New-space objects can move during GC, so
// they are loaded indirectly through a cell the GC updates; old-space
// objects are embedded directly.
2595 void MacroAssembler::LoadHeapObject(Register result,
2596                                     Handle<HeapObject> object) {
2597   AllowDeferredHandleDereference embedding_raw_address;
2598   if (isolate()->heap()->InNewSpace(*object)) {
2599     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2600     mov(result, Operand::ForCell(cell));
2602     mov(result, object);
// Compares |reg| against |object|, indirecting through a cell for movable
// new-space objects (same scheme as LoadHeapObject).
2607 void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
2608   AllowDeferredHandleDereference using_raw_address;
2609   if (isolate()->heap()->InNewSpace(*object)) {
2610     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2611     cmp(reg, Operand::ForCell(cell));
// Pushes |object| onto the stack, indirecting through a cell for movable
// new-space objects (same scheme as LoadHeapObject).
2618 void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
2619   AllowDeferredHandleDereference using_raw_address;
2620   if (isolate()->heap()->InNewSpace(*object)) {
2621     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2622     push(Operand::ForCell(cell));
// Plain return (body on a line missing from this listing).
2629 void MacroAssembler::Ret() {
// Returns while dropping |bytes_dropped| bytes of arguments.  Small drops
// use the ret-immediate encoding (uint16 operand); larger drops fall back
// to an explicit esp adjustment using |scratch| (the ret path for that case
// is on lines missing from this listing).
2634 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2635   if (is_uint16(bytes_dropped)) {
2639     add(esp, Immediate(bytes_dropped));
// Pops |stack_elements| pointer-sized slots by bumping esp; no-op for 0.
2646 void MacroAssembler::Drop(int stack_elements) {
2647   if (stack_elements > 0) {
2648     add(esp, Immediate(stack_elements * kPointerSize));
// Register-to-register move (body on a line missing from this listing;
// presumably elided when dst == src).
2653 void MacroAssembler::Move(Register dst, Register src) {
// Loads an immediate, using xor for zero since it encodes shorter than a
// mov of a 32-bit zero immediate.
2660 void MacroAssembler::Move(Register dst, const Immediate& x) {
2662     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
// Stores an immediate to a memory operand (body missing from this listing).
2669 void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
// Materializes a 32-bit constant in an XMM register without a memory load
// where possible.  A value whose set bits form one contiguous run
// (nlz + popcount + ntz == 32) is built from all-ones via shifts:
// shift right to clear the low zeros, or shift left then right to position
// the run.  Otherwise the constant goes through eax and movd.
2674 void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
2678     unsigned cnt = base::bits::CountPopulation32(src);
2679     unsigned nlz = base::bits::CountLeadingZeros32(src);
2680     unsigned ntz = base::bits::CountTrailingZeros32(src);
2681     if (nlz + cnt + ntz == 32) {
2684         psrld(dst, 32 - cnt);
2686         pslld(dst, 32 - cnt);
2687         if (nlz != 0) psrld(dst, nlz);
2691       mov(eax, Immediate(src));
2692       movd(dst, Operand(eax));
// Materializes a 64-bit constant in an XMM register.  Strategies, in order:
// a single contiguous run of set bits is built from all-ones via 64-bit
// shifts; a value with a zero low word needs only the upper half; with
// SSE4.1, the two halves are inserted via movd + pinsrd; otherwise both
// words are pushed on the stack and loaded with movsd.
2699 void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
2700     uint32_t lower = static_cast<uint32_t>(src);
2701     uint32_t upper = static_cast<uint32_t>(src >> 32);
2705       unsigned cnt = base::bits::CountPopulation64(src);
2706       unsigned nlz = base::bits::CountLeadingZeros64(src);
2707       unsigned ntz = base::bits::CountTrailingZeros64(src);
2708       if (nlz + cnt + ntz == 64) {
2711           psrlq(dst, 64 - cnt);
2713           psllq(dst, 64 - cnt);
2714           if (nlz != 0) psrlq(dst, nlz);
2716       } else if (lower == 0) {
2719       } else if (CpuFeatures::IsSupported(SSE4_1)) {
2720         CpuFeatureScope scope(this, SSE4_1);
2722         Move(eax, Immediate(lower));
2723         movd(dst, Operand(eax));
2724         Move(eax, Immediate(upper));
2725         pinsrd(dst, Operand(eax), 1);
2728         push(Immediate(upper));
2729         push(Immediate(lower));
2730         movsd(dst, Operand(esp, 0));
2731         add(esp, Immediate(kDoubleSize));
// Sets a native stats counter to |value|, if counters are enabled.
2737 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2738   if (FLAG_native_code_counters && counter->Enabled()) {
2739     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
// Adds |value| to a native stats counter, if counters are enabled.  (A
// special-cased inc for value == 1 likely sits on the missing lines.)
2744 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2746   if (FLAG_native_code_counters && counter->Enabled()) {
2747     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2751       add(operand, Immediate(value));
// Subtracts |value| from a native stats counter, if counters are enabled.
2757 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2759   if (FLAG_native_code_counters && counter->Enabled()) {
2760     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2764       sub(operand, Immediate(value));
// Conditional increment: bumps the counter only when condition |cc| holds,
// by jumping over the update on the negated condition.
2770 void MacroAssembler::IncrementCounter(Condition cc,
2771                                       StatsCounter* counter,
2774   if (FLAG_native_code_counters && counter->Enabled()) {
2776     j(NegateCondition(cc), &skip);
2778     IncrementCounter(counter, value);
// Conditional decrement: mirror image of the conditional IncrementCounter.
2785 void MacroAssembler::DecrementCounter(Condition cc,
2786                                       StatsCounter* counter,
2789   if (FLAG_native_code_counters && counter->Enabled()) {
2791     j(NegateCondition(cc), &skip);
2793     DecrementCounter(counter, value);
// Debug-only check: emits a Check (abort unless |cc| holds) only in
// debug-code builds; emits nothing otherwise.
2800 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2801   if (emit_debug_code()) Check(cc, reason);
// Debug-only check that |elements| has one of the fast elements backing
// store maps (FixedArray, FixedDoubleArray, or the COW FixedArray);
// aborts otherwise.  The equal-jumps between the compares fall on lines
// missing from this listing.
2805 void MacroAssembler::AssertFastElements(Register elements) {
2806   if (emit_debug_code()) {
2807     Factory* factory = isolate()->factory();
2809     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2810         Immediate(factory->fixed_array_map()));
2812     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2813         Immediate(factory->fixed_double_array_map()));
2815     cmp(FieldOperand(elements, HeapObject::kMapOffset),
2816         Immediate(factory->fixed_cow_array_map()));
2818     Abort(kJSObjectWithFastElementsMapHasSlowElements);
// Aborts with |reason| unless condition |cc| holds (the branch and Abort
// call are on lines missing from this listing).
2824 void MacroAssembler::Check(Condition cc, BailoutReason reason) {
2828   // will not return here
// Verifies esp is aligned to the OS activation-frame alignment; emits an
// abort (int3, on a missing line) when the low mask bits are nonzero.
2833 void MacroAssembler::CheckStackAlignment() {
2834   int frame_alignment = base::OS::ActivationFrameAlignment();
2835   int frame_alignment_mask = frame_alignment - 1;
2836   if (frame_alignment > kPointerSize) {
2837     DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
2838     Label alignment_as_expected;
2839     test(esp, Immediate(frame_alignment_mask));
2840     j(zero, &alignment_as_expected);
2841     // Abort if stack is not aligned.
2843     bind(&alignment_as_expected);
// Emits an unconditional abort: records the reason as a comment, optionally
// traps immediately under --trap-on-abort, then pushes the reason smi and
// calls Runtime::kAbort -- faking a NONE frame when no real frame exists so
// the call is legal outside generated frames.  Never returns.
2848 void MacroAssembler::Abort(BailoutReason reason) {
2850   const char* msg = GetBailoutReason(reason);
2852   RecordComment("Abort message: ");
2856   if (FLAG_trap_on_abort) {
2862   push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
2863   // Disable stub call restrictions to always allow calls to abort.
2865     // We don't actually want to generate a pile of code for this, so just
2866     // claim there is a stack frame, without generating one.
2867     FrameScope scope(this, StackFrame::NONE);
2868     CallRuntime(Runtime::kAbort, 1);
2870     CallRuntime(Runtime::kAbort, 1);
2872   // will not return here
// Loads |map|'s descriptor array into |descriptors|.
2877 void MacroAssembler::LoadInstanceDescriptors(Register map,
2878                                              Register descriptors) {
2879   mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
// Extracts the number-of-own-descriptors bit field from |map|'s bit_field3
// into |dst|.
2883 void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
2884   mov(dst, FieldOperand(map, Map::kBitField3Offset));
2885   DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
// Loads 2^power into |dst| as a double: places the biased exponent in the
// scratch register and shifts it into the IEEE-754 exponent position
// (mantissa stays zero).  The movd from scratch to dst is on a missing line.
2889 void MacroAssembler::LoadPowerOf2(XMMRegister dst,
2892   DCHECK(is_uintn(power + HeapNumber::kExponentBias,
2893                   HeapNumber::kExponentBits));
2894   mov(scratch, Immediate(power + HeapNumber::kExponentBias));
2896   psllq(dst, HeapNumber::kMantissaBits);
// Looks |object| (a smi or HeapNumber) up in the number-string cache and
// leaves the cached string in |result|; jumps to |not_found| on a miss.
// The hash is the smi value itself, or the xor of the heap number's two
// words; the index is hash & mask where mask = (cache length / 2) - 1,
// since each entry occupies two slots (number, string).
// NOTE(review): several "&not_smi"/"&not_found"-style arguments appear
// mojibake'd as "¬_smi" in this listing (HTML entity &not collapsed);
// restore against the canonical source before compiling.
2900 void MacroAssembler::LookupNumberStringCache(Register object,
2905   // Use of registers. Register result is used as a temporary.
2906   Register number_string_cache = result;
2907   Register mask = scratch1;
2908   Register scratch = scratch2;
2910   // Load the number string cache.
2911   LoadRoot(number_string_cache, Heap::kNumberStringCacheRootIndex);
2912   // Make the hash mask from the length of the number string cache. It
2913   // contains two elements (number and string) for each cache entry.
2914   mov(mask, FieldOperand(number_string_cache, FixedArray::kLengthOffset));
2915   shr(mask, kSmiTagSize + 1);  // Untag length and divide it by two.
2916   sub(mask, Immediate(1));  // Make mask.
2918   // Calculate the entry in the number string cache. The hash value in the
2919   // number string cache for smis is just the smi value, and the hash for
2920   // doubles is the xor of the upper and lower words. See
2921   // Heap::GetNumberStringCache.
2922   Label smi_hash_calculated;
2923   Label load_result_from_cache;
2925   STATIC_ASSERT(kSmiTag == 0);
2926   JumpIfNotSmi(object, ¬_smi, Label::kNear);
2927   mov(scratch, object);
2929   jmp(&smi_hash_calculated, Label::kNear);
  // Heap-number path: verify the map, then hash the two 32-bit halves.
2931   cmp(FieldOperand(object, HeapObject::kMapOffset),
2932       isolate()->factory()->heap_number_map());
2933   j(not_equal, not_found);
2934   STATIC_ASSERT(8 == kDoubleSize);
2935   mov(scratch, FieldOperand(object, HeapNumber::kValueOffset));
2936   xor_(scratch, FieldOperand(object, HeapNumber::kValueOffset + 4));
2937   // Object is heap number and hash is now in scratch. Calculate cache index.
2938   and_(scratch, mask);
2939   Register index = scratch;
2940   Register probe = mask;
2942       FieldOperand(number_string_cache,
2944                    times_twice_pointer_size,
2945                    FixedArray::kHeaderSize));
2946   JumpIfSmi(probe, not_found);
  // Compare doubles bit-for-equal via ucomisd; NaN sets parity.
2947   movsd(xmm0, FieldOperand(object, HeapNumber::kValueOffset));
2948   ucomisd(xmm0, FieldOperand(probe, HeapNumber::kValueOffset));
2949   j(parity_even, not_found);  // Bail out if NaN is involved.
2950   j(not_equal, not_found);  // The cache did not contain this value.
2951   jmp(&load_result_from_cache, Label::kNear);
2953   bind(&smi_hash_calculated);
2954   // Object is smi and hash is now in scratch. Calculate cache index.
2955   and_(scratch, mask);
2956   // Check if the entry is the smi we are looking for.
2958       FieldOperand(number_string_cache,
2960                    times_twice_pointer_size,
2961                    FixedArray::kHeaderSize));
2962   j(not_equal, not_found);
2964   // Get the result from the cache.
2965   bind(&load_result_from_cache);
2967       FieldOperand(number_string_cache,
2969                    times_twice_pointer_size,
2970                    FixedArray::kHeaderSize + kPointerSize));
2971   IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
// Jumps to |failure| unless |instance_type| denotes a sequential one-byte
// string: masks off everything but string-ness, representation and
// encoding, then requires the exact seq-one-byte tag combination.
2975 void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
2976     Register instance_type, Register scratch, Label* failure) {
2977   if (!scratch.is(instance_type)) {
2978     mov(scratch, instance_type);
2981        kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
2982   cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
2983   j(not_equal, failure);
// Jumps to |failure| unless both objects are flat (sequential) one-byte
// strings.  First rejects smis (and-ing the two values: a zero tag bit in
// either survives the and), then masks each instance type and packs both
// 3-bit-wide masked values into one register via lea so a single compare
// checks both at once.
2987 void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
2992   // Check that both objects are not smis.
2993   STATIC_ASSERT(kSmiTag == 0);
2994   mov(scratch1, object1);
2995   and_(scratch1, object2);
2996   JumpIfSmi(scratch1, failure);
2998   // Load instance type for both strings.
2999   mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
3000   mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
3001   movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
3002   movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));
3004   // Check that both are flat one-byte strings.
3005   const int kFlatOneByteStringMask =
3006       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
3007   const int kFlatOneByteStringTag =
3008       kStringTag | kOneByteStringTag | kSeqStringTag;
3009   // Interleave bits from both instance types and compare them in one check.
3010   DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
3011   and_(scratch1, kFlatOneByteStringMask);
3012   and_(scratch2, kFlatOneByteStringMask);
3013   lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
3014   cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
3015   j(not_equal, failure);
// Jumps to |not_unique_name| unless the instance type in |operand| is a
// unique name: an internalized string (both mask bits clear) or a Symbol.
// A succeeded-jump between the test and the symbol compare falls on a line
// missing from this listing.
3019 void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
3020                                                      Label* not_unique_name,
3021                                                      Label::Distance distance) {
3022   STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
3024   test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
3026   cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
3027   j(not_equal, not_unique_name, distance);
// Debug checks before storing a character into a sequential string:
// |string| must be a non-smi whose representation/encoding bits equal
// |encoding_mask|, and |index| must be a non-negative smi-taggable value
// below the string length.  The index is smi-tagged in place for the
// comparison and restored afterwards (restore is on a missing line).
3033 void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
3036                                                uint32_t encoding_mask) {
3038   JumpIfNotSmi(string, &is_object, Label::kNear);
3043   mov(value, FieldOperand(string, HeapObject::kMapOffset));
3044   movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));
3046   and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
3047   cmp(value, Immediate(encoding_mask));
3049   Check(equal, kUnexpectedStringType);
3051   // The index is assumed to be untagged coming in, tag it to compare with the
3052   // string length without using a temp register, it is restored at the end of
3055   Check(no_overflow, kIndexIsTooLarge);
3057   cmp(index, FieldOperand(string, String::kLengthOffset));
3058   Check(less, kIndexIsTooLarge);
3060   cmp(index, Immediate(Smi::FromInt(0)));
3061   Check(greater_equal, kIndexIsNegative);
3063   // Restore the index
// Reserves stack space for |num_arguments| C-call arguments.  When the OS
// requires frame alignment, also aligns esp and saves the original esp in
// the slot just past the arguments (using |scratch|) so CallCFunction can
// restore it; otherwise a plain esp adjustment suffices.
3068 void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
3069   int frame_alignment = base::OS::ActivationFrameAlignment();
3070   if (frame_alignment != 0) {
3071     // Make stack end at alignment and make room for num_arguments words
3072     // and the original value of esp.
3074     sub(esp, Immediate((num_arguments + 1) * kPointerSize));
3075     DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
3076     and_(esp, -frame_alignment);
3077     mov(Operand(esp, num_arguments * kPointerSize), scratch);
3079     sub(esp, Immediate(num_arguments * kPointerSize));
// Calls a C function identified by external reference; stages the address
// in eax, which is safe to clobber since it will hold the return value.
3084 void MacroAssembler::CallCFunction(ExternalReference function,
3085                                    int num_arguments) {
3086   // Trashing eax is ok as it will be the return value.
3087   mov(eax, Immediate(function));
3088   CallCFunction(eax, num_arguments);
// Calls the C function whose address is in |function| (the call itself is
// on a missing line), then undoes PrepareCallCFunction: restores the saved
// esp on aligning platforms, or pops the argument slots otherwise.
3092 void MacroAssembler::CallCFunction(Register function,
3093                                    int num_arguments) {
3094   DCHECK(has_frame());
3095   // Check stack alignment.
3096   if (emit_debug_code()) {
3097     CheckStackAlignment();
3101   if (base::OS::ActivationFrameAlignment() != 0) {
3102     mov(esp, Operand(esp, num_arguments * kPointerSize));
3104     add(esp, Immediate(num_arguments * kPointerSize));
// Returns true when any two of the (up to eight) valid registers alias:
// compares the count of valid registers against the population count of
// their combined bit set -- a duplicate collapses two counts into one bit.
3110 bool AreAliased(Register reg1,
3118   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
3119       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
3120       reg7.is_valid() + reg8.is_valid();
3123   if (reg1.is_valid()) regs |= reg1.bit();
3124   if (reg2.is_valid()) regs |= reg2.bit();
3125   if (reg3.is_valid()) regs |= reg3.bit();
3126   if (reg4.is_valid()) regs |= reg4.bit();
3127   if (reg5.is_valid()) regs |= reg5.bit();
3128   if (reg6.is_valid()) regs |= reg6.bit();
3129   if (reg7.is_valid()) regs |= reg7.bit();
3130   if (reg8.is_valid()) regs |= reg8.bit();
3131   int n_of_non_aliasing_regs = NumRegs(regs);
3133   return n_of_valid_regs != n_of_non_aliasing_regs;
// Constructs a patcher whose embedded assembler writes directly over the
// code at |address|; kGap extra buffer keeps the assembler from hitting
// its buffer-size guard while emitting exactly |size| bytes.
3138 CodePatcher::CodePatcher(byte* address, int size)
3139     : address_(address),
3141       masm_(NULL, address, size + Assembler::kGap) {
3142   // Create a new macro assembler pointing to the address of the code to patch.
3143   // The size is adjusted with kGap on order for the assembler to generate size
3144   // bytes of instructions without failing with buffer size constraints.
3145   DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
// Flushes the instruction cache over the patched range and verifies that
// exactly the promised number of bytes was emitted.
3149 CodePatcher::~CodePatcher() {
3150   // Indicate that code has changed.
3151   CpuFeatures::FlushICache(address_, size_);
3153   // Check that the code was patched as expected.
3154   DCHECK(masm_.pc_ == address_ + size_);
3155   DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
// Tests |mask| against the memory-chunk flags of the page containing
// |object| (page start = object & ~kPageAlignmentMask) and jumps to
// |condition_met| when the test yields |cc| (zero or not_zero).  Uses a
// byte test when the mask fits in the low byte.
3159 void MacroAssembler::CheckPageFlag(
3164     Label* condition_met,
3165     Label::Distance condition_met_distance) {
3166   DCHECK(cc == zero || cc == not_zero);
3167   if (scratch.is(object)) {
3168     and_(scratch, Immediate(~Page::kPageAlignmentMask));
3170     mov(scratch, Immediate(~Page::kPageAlignmentMask));
3171     and_(scratch, object);
3173   if (mask < (1 << kBitsPerByte)) {
3174     test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
3175            static_cast<uint8_t>(mask));
3177     test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
3179   j(cc, condition_met, condition_met_distance);
// Like CheckPageFlag, but for a map known at compile time: the page's flag
// word is addressed directly via a static external reference, which is only
// valid because maps are never compacted (and the serializer is off).
3183 void MacroAssembler::CheckPageFlagForMap(
3187     Label* condition_met,
3188     Label::Distance condition_met_distance) {
3189   DCHECK(cc == zero || cc == not_zero);
3190   Page* page = Page::FromAddress(map->address());
3191   DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
3192   ExternalReference reference(ExternalReference::page_flags(page));
3193   // The inlined static address check of the page's flags relies
3194   // on maps never being compacted.
3195   DCHECK(!isolate()->heap()->mark_compact_collector()->
3196          IsOnEvacuationCandidate(*map));
3197   if (mask < (1 << kBitsPerByte)) {
3198     test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
3200     test(Operand::StaticVariable(reference), Immediate(mask));
3202   j(cc, condition_met, condition_met_distance);
// Jumps to |if_deprecated| when |map|'s Deprecated bit is set in
// bit_field3; skipped entirely when the map can never be deprecated.
// (The load of the map into scratch is on a missing line.)
3206 void MacroAssembler::CheckMapDeprecated(Handle<Map> map,
3208                                         Label* if_deprecated) {
3209   if (map->CanBeDeprecated()) {
3211     mov(scratch, FieldOperand(scratch, Map::kBitField3Offset));
3212     and_(scratch, Immediate(Map::Deprecated::kMask));
3213     j(not_zero, if_deprecated);
// Jumps to |on_black| when |object|'s incremental-marking color is black,
// i.e. its two mark bits are "10" (kBlackBitPattern).
3218 void MacroAssembler::JumpIfBlack(Register object,
3222                                  Label::Distance on_black_near) {
3223   HasColor(object, scratch0, scratch1,
3224            on_black, on_black_near,
3225            1, 0);  // kBlackBitPattern.
3226   DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
// Jumps to |has_color| when |object|'s two mark bits equal
// (first_bit, second_bit).  Tests the first bit at the computed bitmap
// cell, shifts the mask left by one (add-to-self) to reach the second bit,
// and falls back to testing bit 0 of the next cell when the shift crosses
// a 32-bit cell boundary (mask becomes zero).  Clobbers ecx via GetMarkBits.
3230 void MacroAssembler::HasColor(Register object,
3231                               Register bitmap_scratch,
3232                               Register mask_scratch,
3234                               Label::Distance has_color_distance,
3237   DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));
3239   GetMarkBits(object, bitmap_scratch, mask_scratch);
3241   Label other_color, word_boundary;
3242   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3243   j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
3244   add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
3245   j(zero, &word_boundary, Label::kNear);
3246   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3247   j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
3248   jmp(&other_color, Label::kNear);
3250   bind(&word_boundary);
3251   test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);
3253   j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
// Computes the marking-bitmap cell address for |addr_reg| into |bitmap_reg|
// and a single-bit mask for its bit position into |mask_reg| (the final
// shl of mask_reg by ecx is on a missing line).  Derives the cell index
// from the page offset and the bit index from the word index within the
// cell.  Clobbers ecx.
3258 void MacroAssembler::GetMarkBits(Register addr_reg,
3259                                  Register bitmap_reg,
3260                                  Register mask_reg) {
3261   DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
3262   mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
3263   and_(bitmap_reg, addr_reg);
3266       Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
3269       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));
3271   add(bitmap_reg, ecx);
3273   shr(ecx, kPointerSizeLog2);
3274   and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
3275   mov(mask_reg, Immediate(1));
// Incremental-marking write-barrier helper.  If |value| is already
// black/grey, does nothing.  If it is white and is a data object that the
// marker never scans (HeapNumber, external or sequential string), marks it
// black in place and bumps the page's live-bytes count; otherwise jumps to
// |value_is_white_and_not_data| so the slow path can handle it.  Clobbers
// ecx (map / length / instance type in turn).
// NOTE(review): "&not_heap_number"/"&not_external" appear mojibake'd as
// "¬_heap_number"/"¬_external" in this listing (HTML entity &not
// collapsed); restore against the canonical source before compiling.
3280 void MacroAssembler::EnsureNotWhite(
3282     Register bitmap_scratch,
3283     Register mask_scratch,
3284     Label* value_is_white_and_not_data,
3285     Label::Distance distance) {
3286   DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
3287   GetMarkBits(value, bitmap_scratch, mask_scratch);
3289   // If the value is black or grey we don't need to do anything.
3290   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
3291   DCHECK(strcmp(Marking::kBlackBitPattern, "10") == 0);
3292   DCHECK(strcmp(Marking::kGreyBitPattern, "11") == 0);
3293   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
3297   // Since both black and grey have a 1 in the first position and white does
3298   // not have a 1 there we only need to check one bit.
3299   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3300   j(not_zero, &done, Label::kNear);
3302   if (emit_debug_code()) {
3303     // Check for impossible bit pattern.
3306     // shl.  May overflow making the check conservative.
3307     add(mask_scratch, mask_scratch);
3308     test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
3309     j(zero, &ok, Label::kNear);
3315   // Value is white.  We check whether it is data that doesn't need scanning.
3316   // Currently only checks for HeapNumber and non-cons strings.
3317   Register map = ecx;  // Holds map while checking type.
3318   Register length = ecx;  // Holds length of object after checking type.
3319   Label not_heap_number;
3320   Label is_data_object;
3322   // Check for heap-number
3323   mov(map, FieldOperand(value, HeapObject::kMapOffset));
3324   cmp(map, isolate()->factory()->heap_number_map());
3325   j(not_equal, ¬_heap_number, Label::kNear);
3326   mov(length, Immediate(HeapNumber::kSize));
3327   jmp(&is_data_object, Label::kNear);
3329   bind(¬_heap_number);
3330   // Check for strings.
3331   DCHECK(kIsIndirectStringTag == 1 && kIsIndirectStringMask == 1);
3332   DCHECK(kNotStringTag == 0x80 && kIsNotStringMask == 0x80);
3333   // If it's a string and it's not a cons string then it's an object containing
3335   Register instance_type = ecx;
3336   movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
3337   test_b(instance_type, kIsIndirectStringMask | kIsNotStringMask);
3338   j(not_zero, value_is_white_and_not_data);
3339   // It's a non-indirect (non-cons and non-slice) string.
3340   // If it's external, the length is just ExternalString::kSize.
3341   // Otherwise it's String::kHeaderSize + string->length() * (1 or 2).
3343   // External strings are the only ones with the kExternalStringTag bit
3345   DCHECK_EQ(0, kSeqStringTag & kExternalStringTag);
3346   DCHECK_EQ(0, kConsStringTag & kExternalStringTag);
3347   test_b(instance_type, kExternalStringTag);
3348   j(zero, ¬_external, Label::kNear);
3349   mov(length, Immediate(ExternalString::kSize));
3350   jmp(&is_data_object, Label::kNear);
3352   bind(¬_external);
3353   // Sequential string, either Latin1 or UC16.
3354   DCHECK(kOneByteStringTag == 0x04);
  // Turn the encoding bit into a char size: 4 for Latin1, 8 for UC16.
3355   and_(length, Immediate(kStringEncodingMask));
3356   xor_(length, Immediate(kStringEncodingMask));
3357   add(length, Immediate(0x04));
3358   // Value now either 4 (if Latin1) or 8 (if UC16), i.e., char-size shifted
3359   // by 2.  If we multiply the string length as smi by this, it still
3360   // won't overflow a 32-bit value.
3361   DCHECK_EQ(SeqOneByteString::kMaxSize, SeqTwoByteString::kMaxSize);
3362   DCHECK(SeqOneByteString::kMaxSize <=
3363          static_cast<int>(0xffffffffu >> (2 + kSmiTagSize)));
3364   imul(length, FieldOperand(value, String::kLengthOffset));
3365   shr(length, 2 + kSmiTagSize + kSmiShiftSize);
3366   add(length, Immediate(SeqString::kHeaderSize + kObjectAlignmentMask));
3367   and_(length, Immediate(~kObjectAlignmentMask));
3369   bind(&is_data_object);
3370   // Value is a data object, and it is white.  Mark it black.  Since we know
3371   // that the object is white we can make it black by flipping one bit.
3372   or_(Operand(bitmap_scratch, MemoryChunk::kHeaderSize), mask_scratch);
3374   and_(bitmap_scratch, Immediate(~Page::kPageAlignmentMask));
3375   add(Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset),
3377   if (emit_debug_code()) {
3378     mov(length, Operand(bitmap_scratch, MemoryChunk::kLiveBytesOffset));
3379     cmp(length, Operand(bitmap_scratch, MemoryChunk::kSizeOffset));
3380     Check(less_equal, kLiveBytesCountOverflowChunkSize);
// Loads the EnumLength field of |map| (the count of own enumerable
// properties whose names are cached on the map) into |dst|.
// Clobbers |dst| only.
void MacroAssembler::EnumLength(Register dst, Register map) {
  // EnumLengthBits must occupy the low bits of bit field 3 so a plain
  // mask (no shift) extracts the value.
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
// Walks the prototype chain of the object held in ecx and jumps to
// |call_runtime| unless a for-in over the object can rely entirely on
// the maps' enum caches: the receiver's map must have a valid enum
// length, every other map's cache must be empty, and no object in the
// chain may have elements (beyond the canonical empty fixed array or
// the empty slow element dictionary).
// Clobbers ebx, ecx, edx.
// NOTE(review): the declarations/binds of the loop labels used below
// (&next, &no_elements) live adjacent to this excerpt.
void MacroAssembler::CheckEnumCache(Label* call_runtime) {
  // Check if the enum length field is properly initialized, indicating that
  // there is an enum cache.
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  EnumLength(edx, ebx);
  // kInvalidEnumCacheSentinel means the cache was never set up.
  cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
  j(equal, call_runtime);
  mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
  // For all objects but the receiver, check that the cache is empty.
  EnumLength(edx, ebx);
  cmp(edx, Immediate(Smi::FromInt(0)));
  j(not_equal, call_runtime);
  // Check that there are no elements. Register ecx contains the current JS
  // object we've reached through the prototype chain.
  mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
  cmp(ecx, isolate()->factory()->empty_fixed_array());
  j(equal, &no_elements);
  // Second chance, the object may be using the empty slow element dictionary.
  cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
  j(not_equal, call_runtime);
  // Advance to the prototype; a null prototype terminates the chain
  // successfully, anything else loops back for another iteration.
  mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
  cmp(ecx, isolate()->factory()->null_value());
  j(not_equal, &next);
// Tests whether an AllocationMemento is located immediately behind the
// JSArray in |receiver_reg|.  Jumps to |no_memento_found| when the
// candidate memento slot lies outside the currently used portion of new
// space; otherwise falls through with the flags set by the final map
// comparison (equal iff the slot holds the allocation-memento map), so
// the caller branches on those flags.  Clobbers |scratch_reg|.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Untagged address one past where a trailing memento would end.
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  // The slot must lie within [new_space_start, allocation_top]; anything
  // outside cannot be a live memento.
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  // Compare the would-be memento's map word against the canonical
  // allocation-memento map; leaves the result in the flags.
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
// Walks the map/prototype chain starting at |object|, decoding each
// map's elements kind and comparing it against DICTIONARY_ELEMENTS;
// loops until the chain terminates at the null value.
// NOTE(review): the parameter list (object, scratch0, scratch1, and the
// target label) and the branch taken on the DICTIONARY_ELEMENTS
// comparison are adjacent to this excerpt.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  // |current| tracks the object (then its maps' prototypes) as we walk.
  Register current = scratch0;
  // scratch contained elements pointer.
  mov(current, object);

  // Loop based on the map going up the prototype chain.
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  // Extract the elements kind from bit field 2 and test for
  // dictionary-mode elements.
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  // Follow the prototype link; a null prototype ends the walk.
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);
// Computes the truncated 32-bit signed quotient |dividend| / |divisor|
// for a compile-time-constant divisor without a div instruction, using
// the Granlund/Montgomery "magic number" multiplication technique: a
// widening multiply by mag.multiplier followed by sign corrections and
// an arithmetic shift of the high half.
// Clobbers eax and edx, hence |dividend| must live in another register.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  // eax supplies one operand of the widening multiply.
  mov(eax, Immediate(mag.multiplier));
  // The magic multiplier is used as a signed value; when its sign bit is
  // set the product's high half needs compensating by +/- dividend.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  // Final arithmetic shift of the (corrected) high half yields the
  // truncated quotient.
  if (mag.shift > 0) sar(edx, mag.shift);
// Computes the absolute value of the four packed single-precision
// floats in |dst| by clearing each lane's sign bit.
void MacroAssembler::absps(XMMRegister dst) {
  // Aligned to 16 bytes so the constant can be used directly as a
  // 128-bit SSE memory operand: four lanes of 0x7FFFFFFF (all bits
  // except the float sign bit).
  static const struct V8_ALIGNED(16) {
  } float_absolute_constant =
      { 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF, 0x7FFFFFFF };
        Operand(reinterpret_cast<int32_t>(&float_absolute_constant),
                RelocInfo::NONE32));
// Computes the absolute value of the two packed double-precision
// floats in |dst| by clearing each lane's sign bit.
void MacroAssembler::abspd(XMMRegister dst) {
  // 16-byte-aligned 128-bit mask: per double, all bits set except the
  // sign bit (0x7FFFFFFF_FFFFFFFF per 64-bit lane, little-endian words).
  static const struct V8_ALIGNED(16) {
  } double_absolute_constant =
      { 0xFFFFFFFF, 0x7FFFFFFF, 0xFFFFFFFF, 0x7FFFFFFF };
        Operand(reinterpret_cast<int32_t>(&double_absolute_constant),
                RelocInfo::NONE32));
// Computes the bitwise complement of the 128 bits in |dst| by combining
// it with an all-ones constant.
void MacroAssembler::notps(XMMRegister dst) {
  // 16-byte-aligned 128-bit all-ones mask, usable directly as an SSE
  // memory operand.
  static const struct V8_ALIGNED(16) {
  } float_not_constant =
      { 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF, 0xFFFFFFFF };
        Operand(reinterpret_cast<int32_t>(&float_not_constant),
                RelocInfo::NONE32));
// Negates the four packed single-precision floats in |dst| by flipping
// each lane's sign bit.
void MacroAssembler::negateps(XMMRegister dst) {
  // 16-byte-aligned 128-bit mask with only the float sign bit set in
  // each 32-bit lane.
  static const struct V8_ALIGNED(16) {
  } float_negate_constant =
      { 0x80000000, 0x80000000, 0x80000000, 0x80000000 };
        Operand(reinterpret_cast<int32_t>(&float_negate_constant),
                RelocInfo::NONE32));
// Negates the two packed double-precision floats in |dst| by flipping
// each lane's sign bit.
void MacroAssembler::negatepd(XMMRegister dst) {
  // 16-byte-aligned 128-bit mask with only the double sign bit set in
  // each 64-bit lane (high word 0x80000000, low word 0, little-endian).
  static const struct V8_ALIGNED(16) {
  } double_negate_constant =
      { 0x00000000, 0x80000000, 0x00000000, 0x80000000 };
        Operand(reinterpret_cast<int32_t>(&double_negate_constant),
                RelocInfo::NONE32));
// Negates the four packed int32 lanes in |dst| ("pnegd" = packed negate
// dwords).  The constant below holds 1 in every lane; presumably it is
// added to the bitwise complement of |dst| to form the two's-complement
// negation (-x = ~x + 1) — the combining instructions are adjacent to
// this excerpt (TODO confirm).
void MacroAssembler::pnegd(XMMRegister dst) {
  // 16-byte-aligned 128-bit constant: four lanes of 1, usable directly
  // as an SSE memory operand.
  static const struct V8_ALIGNED(16) {
  } int32_one_constant = { 0x1, 0x1, 0x1, 0x1 };
        Operand(reinterpret_cast<int32_t>(&int32_one_constant),
                RelocInfo::NONE32));
3584 } } // namespace v8::internal
3586 #endif // V8_TARGET_ARCH_IA32