// Copyright 2013 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "v8.h"

#if V8_TARGET_ARCH_X64

#include "bootstrapper.h"
#include "code-stubs.h"
#include "regexp-macro-assembler.h"
#include "stub-cache.h"

namespace v8 {
namespace internal {
void FastNewClosureStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNewClosureFromStubFailure)->entry;
}


void ToNumberStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void NumberToStringStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kNumberToString)->entry;
}


void FastCloneShallowArrayStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateArrayLiteralShallow)->entry;
}


void FastCloneShallowObjectStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kCreateObjectLiteral)->entry;
}


void CreateAllocationSiteStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rbx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedLoadIC_MissFromStubFailure);
}


void LoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedLoadFieldStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = NULL;
}


void KeyedStoreFastElementStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(KeyedStoreIC_MissFromStubFailure);
}


void TransitionElementsKindStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kTransitionElementsKind)->entry;
}


void BinaryOpStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rax };
  descriptor->register_param_count_ = 2;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ = FUNCTION_ADDR(BinaryOpIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kBinaryOpIC_Miss), isolate));
}
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //  rax -- number of arguments
  //  rdi -- function
  //  rbx -- type info cell with elements kind
  static Register registers[] = { rdi, rbx };
  descriptor->register_param_count_ = 2;
  if (constant_stack_parameter_count != 0) {
    // Stack param count needs (constructor pointer, and single argument).
    descriptor->stack_parameter_count_ = rax;
  }
  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->register_params_ = registers;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kArrayConstructor)->entry;
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor,
    int constant_stack_parameter_count) {
  // Register state:
  //  rax -- number of arguments
  //  rdi -- constructor function
  static Register registers[] = { rdi };
  descriptor->register_param_count_ = 1;

  if (constant_stack_parameter_count != 0) {
    // Stack param count needs (constructor pointer, and single argument).
    descriptor->stack_parameter_count_ = rax;
  }
  descriptor->hint_stack_parameter_count_ = constant_stack_parameter_count;
  descriptor->register_params_ = registers;
  descriptor->function_mode_ = JS_FUNCTION_STUB_MODE;
  descriptor->deoptimization_handler_ =
      Runtime::FunctionForId(Runtime::kInternalArrayConstructor)->entry;
}
void ArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate, descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate, descriptor, -1);
}
void CompareNilICStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(CompareNilIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kCompareNilIC_Miss), isolate));
}


void ToBooleanStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax };
  descriptor->register_param_count_ = 1;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ToBooleanIC_Miss);
  descriptor->SetMissHandler(
      ExternalReference(IC_Utility(IC::kToBooleanIC_Miss), isolate));
}


void StoreGlobalStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rdx, rcx, rax };
  descriptor->register_param_count_ = 3;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(StoreIC_MissFromStubFailure);
}


void ElementsTransitionAndStoreStub::InitializeInterfaceDescriptor(
    Isolate* isolate,
    CodeStubInterfaceDescriptor* descriptor) {
  static Register registers[] = { rax, rbx, rcx, rdx };
  descriptor->register_param_count_ = 4;
  descriptor->register_params_ = registers;
  descriptor->deoptimization_handler_ =
      FUNCTION_ADDR(ElementsTransitionAndStoreIC_Miss);
}
#define __ ACCESS_MASM(masm)


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm) {
  // Update the static counter each time a new code stub is generated.
  Isolate* isolate = masm->isolate();
  isolate->counters()->code_stubs()->Increment();

  CodeStubInterfaceDescriptor* descriptor = GetInterfaceDescriptor(isolate);
  int param_count = descriptor->register_param_count_;
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    ASSERT(descriptor->register_param_count_ == 0 ||
           rax.is(descriptor->register_params_[param_count - 1]));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor->register_params_[i]);
    }
    ExternalReference miss = descriptor->miss_handler();
    __ CallExternalReference(miss, descriptor->register_param_count_);
  }

  __ Ret();
}
void FastNewContextStub::Generate(MacroAssembler* masm) {
  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate((length * kPointerSize) + FixedArray::kHeaderSize,
              rax, rbx, rcx, &gc, TAG_OBJECT);
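  // (Size sketch: a context is laid out like a FixedArray, i.e. a header
  //  followed by 'length' tagged pointer slots, hence the expression above.)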
  // Get the function from the stack.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(0));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kFunctionContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // Set up the fixed slots.
  __ Set(rbx, 0);  // Set to NULL.
  __ movq(Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX)), rcx);
  __ movq(Operand(rax, Context::SlotOffset(Context::PREVIOUS_INDEX)), rsi);
  __ movq(Operand(rax, Context::SlotOffset(Context::EXTENSION_INDEX)), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movq(Operand(rax, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)), rbx);

  // Initialize the rest of the slots to undefined.
  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
  for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
    __ movq(Operand(rax, Context::SlotOffset(i)), rbx);
  }

  // Return and remove the on-stack parameter.
  __ movq(rsi, rax);
  __ ret(1 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kNewFunctionContext, 1, 1);
}
void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
  // Stack layout on entry:
  //
  // [rsp + (1 * kPointerSize)] : function
  // [rsp + (2 * kPointerSize)] : serialized scope info

  // Try to allocate the context in new space.
  Label gc;
  int length = slots_ + Context::MIN_CONTEXT_SLOTS;
  __ Allocate(FixedArray::SizeFor(length),
              rax, rbx, rcx, &gc, TAG_OBJECT);

  // Get the function from the stack.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(1));
  // Get the serialized scope info from the stack.
  __ movq(rbx, args.GetArgumentOperand(0));

  // Set up the object header.
  __ LoadRoot(kScratchRegister, Heap::kBlockContextMapRootIndex);
  __ movq(FieldOperand(rax, HeapObject::kMapOffset), kScratchRegister);
  __ Move(FieldOperand(rax, FixedArray::kLengthOffset), Smi::FromInt(length));

  // If this block context is nested in the native context we get a smi
  // sentinel instead of a function. The block context should get the
  // canonical empty function of the native context as its closure which
  // we still have to look up.
  Label after_sentinel;
  __ JumpIfNotSmi(rcx, &after_sentinel, Label::kNear);
  if (FLAG_debug_code) {
    __ cmpq(rcx, Immediate(0));
    __ Assert(equal, kExpected0AsASmiSentinel);
  }
  __ movq(rcx, GlobalObjectOperand());
  __ movq(rcx, FieldOperand(rcx, GlobalObject::kNativeContextOffset));
  __ movq(rcx, ContextOperand(rcx, Context::CLOSURE_INDEX));
  __ bind(&after_sentinel);

  // Set up the fixed slots.
  __ movq(ContextOperand(rax, Context::CLOSURE_INDEX), rcx);
  __ movq(ContextOperand(rax, Context::PREVIOUS_INDEX), rsi);
  __ movq(ContextOperand(rax, Context::EXTENSION_INDEX), rbx);

  // Copy the global object from the previous context.
  __ movq(rbx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
  __ movq(ContextOperand(rax, Context::GLOBAL_OBJECT_INDEX), rbx);

  // Initialize the rest of the slots to the hole value.
  __ LoadRoot(rbx, Heap::kTheHoleValueRootIndex);
  for (int i = 0; i < slots_; i++) {
    __ movq(ContextOperand(rax, i + Context::MIN_CONTEXT_SLOTS), rbx);
  }

  // Return and remove the on-stack parameters.
  __ movq(rsi, rax);
  __ ret(2 * kPointerSize);

  // Need to collect. Call into runtime system.
  __ bind(&gc);
  __ TailCallRuntime(Runtime::kPushBlockContext, 2, 1);
}
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles_);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(masm->isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(masm->isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles_);
  __ ret(0);
}
class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  ASSERT(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ push(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);
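  // (Inputs with unbiased exponent below kMantissaBits are small enough for
  //  the 64-bit cvttsd2siq path below; larger ones fall through to the path
  //  that assembles the result by shifting the mantissa bits directly.)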
  // Result is entirely in lower 32-bits of mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addq(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    ASSERT(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
void TranscendentalCacheStub::Generate(MacroAssembler* masm) {
  // TAGGED case:
  //   Input:
  //     rsp[8]  : argument (should be number).
  //     rsp[0]  : return address.
  //   Output:
  //     rax: tagged double result.
  // UNTAGGED case:
  //   Input:
  //     rsp[0]  : return address.
  //     xmm1    : untagged double input argument
  //   Output:
  //     xmm1    : untagged double result.

  Label runtime_call;
  Label runtime_call_clear_stack;
  Label skip_cache;
  const bool tagged = (argument_type_ == TAGGED);
  if (tagged) {
    Label input_not_smi, loaded;

    // Test that rax is a number.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(rax, args.GetArgumentOperand(0));
    __ JumpIfNotSmi(rax, &input_not_smi, Label::kNear);
    // Input is a smi. Untag and load it onto the FPU stack.
    // Then load the bits of the double into rbx.
    __ SmiToInteger32(rax, rax);
    __ subq(rsp, Immediate(kDoubleSize));
    __ Cvtlsi2sd(xmm1, rax);
    __ movsd(Operand(rsp, 0), xmm1);
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&loaded, Label::kNear);

    __ bind(&input_not_smi);
    // Check if input is a HeapNumber.
    __ LoadRoot(rbx, Heap::kHeapNumberMapRootIndex);
    __ cmpq(rbx, FieldOperand(rax, HeapObject::kMapOffset));
    __ j(not_equal, &runtime_call);
    // Input is a HeapNumber. Push it on the FPU stack and load its
    // bits into rbx.
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
    __ MoveDouble(rbx, FieldOperand(rax, HeapNumber::kValueOffset));
    __ movq(rdx, rbx);

    __ bind(&loaded);
  } else {  // UNTAGGED.
    __ movq(rbx, xmm1);
    __ movq(rdx, xmm1);
  }

  // ST[0] == double value, if TAGGED.
  // rbx = bits of double value.
  // rdx = also bits of double value.
  // Compute hash (h is 32 bits, bits are 64 and the shifts are arithmetic):
  //   h = h0 = bits ^ (bits >> 32);
  //   h ^= h >> 16;
  //   h ^= h >> 8;
  //   h = h & (cacheSize - 1);
  // or h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24)) & (cacheSize - 1)
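  // For illustration only, the same hash as scalar C code (a sketch;
  // 'bits' holds the raw 64-bit double, kCacheSize is a power of two, and
  // int32_t matches the arithmetic shifts used below):
  //   int32_t h0 = (int32_t)(bits >> 32) ^ (int32_t)bits;
  //   int32_t h = (h0 ^ (h0 >> 8) ^ (h0 >> 16) ^ (h0 >> 24))
  //               & (kCacheSize - 1);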
  __ sar(rdx, Immediate(32));
  __ xorl(rdx, rbx);
  __ movl(rcx, rdx);
  __ movl(rax, rdx);
  __ movl(rdi, rdx);
  __ sarl(rdx, Immediate(8));
  __ sarl(rcx, Immediate(16));
  __ sarl(rax, Immediate(24));
  __ xorl(rcx, rdx);
  __ xorl(rax, rdi);
  __ xorl(rcx, rax);
  ASSERT(IsPowerOf2(TranscendentalCache::SubCache::kCacheSize));
  __ andl(rcx, Immediate(TranscendentalCache::SubCache::kCacheSize - 1));

  // ST[0] == double value.
  // rbx = bits of double value.
  // rcx = TranscendentalCache::hash(double value).
  ExternalReference cache_array =
      ExternalReference::transcendental_cache_array_address(masm->isolate());
  __ movq(rax, cache_array);
  int cache_array_index =
      type_ * sizeof(masm->isolate()->transcendental_cache()->caches_[0]);
  __ movq(rax, Operand(rax, cache_array_index));
  // rax points to the cache for the type type_.
  // If NULL, the cache hasn't been initialized yet, so go through runtime.
  __ testq(rax, rax);
  __ j(zero, &runtime_call_clear_stack);  // Only clears stack if TAGGED.
#ifdef DEBUG
  // Check that the layout of cache elements match expectations.
  {  // NOLINT - doesn't like a single brace on a line.
    TranscendentalCache::SubCache::Element test_elem[2];
    char* elem_start = reinterpret_cast<char*>(&test_elem[0]);
    char* elem2_start = reinterpret_cast<char*>(&test_elem[1]);
    char* elem_in0 = reinterpret_cast<char*>(&(test_elem[0].in[0]));
    char* elem_in1 = reinterpret_cast<char*>(&(test_elem[0].in[1]));
    char* elem_out = reinterpret_cast<char*>(&(test_elem[0].output));
    // Two uint_32's and a pointer per element.
    CHECK_EQ(2 * kIntSize + 1 * kPointerSize,
             static_cast<int>(elem2_start - elem_start));
    CHECK_EQ(0, static_cast<int>(elem_in0 - elem_start));
    CHECK_EQ(kIntSize, static_cast<int>(elem_in1 - elem_start));
    CHECK_EQ(2 * kIntSize, static_cast<int>(elem_out - elem_start));
  }
#endif
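  // The CHECKs above pin down the layout the probe below depends on; each
  // cache element is, in effect (a sketch, not the declared type):
  //   struct Element { uint32_t in[2]; Object* output; };  // 16 bytes on x64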
  // Find the address of the rcx'th entry in the cache, i.e., &rax[rcx*16].
  __ addl(rcx, rcx);
  __ lea(rcx, Operand(rax, rcx, times_8, 0));
  // Check if cache matches: Double value is stored in uint32_t[2] array.
  Label cache_miss;
  __ cmpq(rbx, Operand(rcx, 0));
  __ j(not_equal, &cache_miss, Label::kNear);
  // Cache hit!
  Counters* counters = masm->isolate()->counters();
  __ IncrementCounter(counters->transcendental_cache_hit(), 1);
  __ movq(rax, Operand(rcx, 2 * kIntSize));
  if (tagged) {
    __ fstp(0);  // Clear FPU stack.
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }

  __ bind(&cache_miss);
  __ IncrementCounter(counters->transcendental_cache_miss(), 1);
  // Update cache with new value.
  if (tagged) {
    __ AllocateHeapNumber(rax, rdi, &runtime_call_clear_stack);
  } else {  // UNTAGGED.
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    __ fld_d(FieldOperand(rax, HeapNumber::kValueOffset));
  }
  GenerateOperation(masm, type_);
  __ movq(Operand(rcx, 0), rbx);
  __ movq(Operand(rcx, 2 * kIntSize), rax);
  __ fstp_d(FieldOperand(rax, HeapNumber::kValueOffset));
  if (tagged) {
    __ ret(kPointerSize);
  } else {  // UNTAGGED.
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();

    // Skip cache and return answer directly, only in untagged case.
    __ bind(&skip_cache);
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), xmm1);
    __ fld_d(Operand(rsp, 0));
    GenerateOperation(masm, type_);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(xmm1, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    // We return the value in xmm1 without adding it to the cache, but
    // we cause a scavenging GC so that future allocations will succeed.
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      // Allocate an unused object bigger than a HeapNumber.
      __ Push(Smi::FromInt(2 * kDoubleSize));
      __ CallRuntimeSaveDoubles(Runtime::kAllocateInNewSpace);
    }
    __ Ret();
  }

  // Call runtime, doing whatever allocation and cleanup is necessary.
  if (tagged) {
    __ bind(&runtime_call_clear_stack);
    __ fstp(0);
    __ bind(&runtime_call);
    __ TailCallExternalReference(
        ExternalReference(RuntimeFunction(), masm->isolate()), 1, 1);
  } else {  // UNTAGGED.
    __ bind(&runtime_call_clear_stack);
    __ bind(&runtime_call);
    __ AllocateHeapNumber(rax, rdi, &skip_cache);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), xmm1);
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ push(rax);
      __ CallRuntime(RuntimeFunction(), 1);
    }
    __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
    __ Ret();
  }
}
Runtime::FunctionId TranscendentalCacheStub::RuntimeFunction() {
  switch (type_) {
    // Add more cases when necessary.
    case TranscendentalCache::SIN: return Runtime::kMath_sin;
    case TranscendentalCache::COS: return Runtime::kMath_cos;
    case TranscendentalCache::TAN: return Runtime::kMath_tan;
    case TranscendentalCache::LOG: return Runtime::kMath_log;
    default:
      UNIMPLEMENTED();
      return Runtime::kAbort;
  }
}
void TranscendentalCacheStub::GenerateOperation(
    MacroAssembler* masm, TranscendentalCache::Type type) {
  // Registers:
  // rax: Newly allocated HeapNumber, which must be preserved.
  // rbx: Bits of input double. Must be preserved.
  // rcx: Pointer to cache entry. Must be preserved.
  // st(0): Input double
  Label done;
  if (type == TranscendentalCache::SIN ||
      type == TranscendentalCache::COS ||
      type == TranscendentalCache::TAN) {
    // Both fsin and fcos require arguments in the range +/-2^63 and
    // return NaN for infinities and NaN. They can share all code except
    // the actual fsin/fcos operation.
    Label in_range;
    // If argument is outside the range -2^63..2^63, fsin/cos doesn't
    // work. We must reduce it to the appropriate range.
    __ movq(rdi, rbx);
    // Move exponent and sign bits to low bits.
    __ shr(rdi, Immediate(HeapNumber::kMantissaBits));
    // Remove sign bit.
    __ andl(rdi, Immediate((1 << HeapNumber::kExponentBits) - 1));
    int supported_exponent_limit = (63 + HeapNumber::kExponentBias);
    __ cmpl(rdi, Immediate(supported_exponent_limit));
    __ j(below, &in_range);
    // Check for infinity and NaN. Both return NaN for sin.
    __ cmpl(rdi, Immediate(0x7ff));
    Label non_nan_result;
    __ j(not_equal, &non_nan_result, Label::kNear);
    // Input is +/-Infinity or NaN. Result is NaN.
    __ fstp(0);
    // NaN is represented by 0x7ff8000000000000.
    __ subq(rsp, Immediate(kPointerSize));
    __ movl(Operand(rsp, 4), Immediate(0x7ff80000));
    __ movl(Operand(rsp, 0), Immediate(0x00000000));
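    // (x64 is little-endian: the high word 0x7ff80000 at rsp[4] plus the
    //  zero low word at rsp[0] form the quiet-NaN pattern named above.)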
    __ fld_d(Operand(rsp, 0));
    __ addq(rsp, Immediate(kPointerSize));
    __ jmp(&done);

    __ bind(&non_nan_result);

    // Use fpmod to restrict argument to the range +/-2*PI.
    __ movq(rdi, rax);  // Save rax before using fnstsw_ax.
    __ fldpi();
    __ fadd(0);
    __ fld(1);
    // FPU Stack: input, 2*pi, input.
    {
      Label no_exceptions;
      __ fwait();
      __ fnstsw_ax();
      // Clear if Illegal Operand or Zero Division exceptions are set.
      __ testl(rax, Immediate(5));  // #IO and #ZD flags of FPU status word.
      __ j(zero, &no_exceptions);
      __ fnclex();
      __ bind(&no_exceptions);
    }

    // Compute st(0) % st(1)
    {
      Label partial_remainder_loop;
      __ bind(&partial_remainder_loop);
      __ fprem1();
      __ fwait();
      __ fnstsw_ax();
      __ testl(rax, Immediate(0x400));  // Check C2 bit of FPU status word.
      // If C2 is set, computation only has partial result. Loop to
      // continue computation.
      __ j(not_zero, &partial_remainder_loop);
    }
    // FPU Stack: input, 2*pi, input % 2*pi
    __ fstp(2);
    // FPU Stack: input % 2*pi, 2*pi,
    __ fstp(0);
    // FPU Stack: input % 2*pi
    __ movq(rax, rdi);  // Restore rax, pointer to the new HeapNumber.
    __ bind(&in_range);
    switch (type) {
      case TranscendentalCache::SIN:
        __ fsin();
        break;
      case TranscendentalCache::COS:
        __ fcos();
        break;
      case TranscendentalCache::TAN:
        // FPTAN calculates tangent onto st(0) and pushes 1.0 onto the
        // FP register stack.
        __ fptan();
        __ fstp(0);  // Pop FP register stack.
        break;
      default:
        UNREACHABLE();
    }
    __ bind(&done);
  } else {
    ASSERT(type == TranscendentalCache::LOG);
    __ fldln2();
    __ fxch();
    __ fyl2x();
  }
}
void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpq(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpq(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = rdx;
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movq(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type_ == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(base, args.GetArgumentOperand(0));
    __ movq(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type_ == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type_ != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x80000000u));
    __ j(equal, &call_runtime);
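    // (cvttsd2si produces the x86 "integer indefinite" value 0x80000000 for
    //  NaN and out-of-range inputs, which is why that pattern is checked.)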
    if (exponent_type_ == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;
      // Test for 0.5.
      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000), RelocInfo::NONE64);
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE64);
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      // Test for -0.5.
      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000), RelocInfo::NONE64);
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subq(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X - rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
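    // Net effect of the sequence above: st(0) = 2^(X - rnd(X)) * 2^rnd(X)
    // = 2^X with X = E * log2(B), i.e. st(0) now holds B^E (up to FPU
    // precision); splitting off rnd(X) keeps f2xm1's operand in (-1, 1).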
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addq(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movq(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);
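  // (The loop above is binary exponentiation; roughly, in C, with n = |e|:
  //    double result = 1.0, b = base;
  //    while (n != 0) { if (n & 1) result *= b; b *= b; n >>= 1; }
  //  the flag tricks fold the "n & 1" test into the shift itself.)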
  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = masm->isolate()->counters();
  if (exponent_type_ == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMath_pow_cfunction, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ movsd(xmm0, double_base);
    ASSERT(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(masm->isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);
    // Restore context register.
    __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset));

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ----------- S t a t e -------------
    //  -- rax    : key
    //  -- rdx    : receiver
    //  -- rsp[0] : return address
    // -----------------------------------
    __ Cmp(rax, masm->isolate()->factory()->prototype_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ----------- S t a t e -------------
    //  -- rax    : receiver
    //  -- rcx    : name
    //  -- rsp[0] : return address
    // -----------------------------------
    receiver = rax;
  }

  StubCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
void StringLengthStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver;
  if (kind() == Code::KEYED_LOAD_IC) {
    // ----------- S t a t e -------------
    //  -- rax    : key
    //  -- rdx    : receiver
    //  -- rsp[0] : return address
    // -----------------------------------
    __ Cmp(rax, masm->isolate()->factory()->length_string());
    __ j(not_equal, &miss);
    receiver = rdx;
  } else {
    ASSERT(kind() == Code::LOAD_IC);
    // ----------- S t a t e -------------
    //  -- rax    : receiver
    //  -- rcx    : name
    //  -- rsp[0] : return address
    // -----------------------------------
    receiver = rax;
  }

  StubCompiler::GenerateLoadStringLength(masm, receiver, r8, r9, &miss);
  __ bind(&miss);
  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
void StoreArrayLengthStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : value
  //  -- rcx    : key
  //  -- rdx    : receiver
  //  -- rsp[0] : return address
  // -----------------------------------
  //
  // This accepts as a receiver anything JSArray::SetElementsLength accepts
  // (currently anything except for external arrays which means anything with
  // elements of FixedArray type). Value must be a number, but only smis are
  // accepted as the most common case.

  Label miss;

  Register receiver = rdx;
  Register value = rax;
  Register scratch = rbx;
  if (kind() == Code::KEYED_STORE_IC) {
    __ Cmp(rcx, masm->isolate()->factory()->length_string());
    __ j(not_equal, &miss);
  }

  // Check that the receiver isn't a smi.
  __ JumpIfSmi(receiver, &miss);

  // Check that the object is a JS array.
  __ CmpObjectType(receiver, JS_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that elements are FixedArray.
  // We rely on StoreIC_ArrayLength below to deal with all types of
  // fast elements (including COW).
  __ movq(scratch, FieldOperand(receiver, JSArray::kElementsOffset));
  __ CmpObjectType(scratch, FIXED_ARRAY_TYPE, scratch);
  __ j(not_equal, &miss);

  // Check that the array has fast properties, otherwise the length
  // property might have been redefined.
  __ movq(scratch, FieldOperand(receiver, JSArray::kPropertiesOffset));
  __ CompareRoot(FieldOperand(scratch, FixedArray::kMapOffset),
                 Heap::kHashTableMapRootIndex);
  __ j(equal, &miss);

  // Check that value is a smi.
  __ JumpIfNotSmi(value, &miss);

  // Prepare tail call to StoreIC_ArrayLength.
  __ PopReturnAddressTo(scratch);
  __ push(receiver);
  __ push(value);
  __ PushReturnAddressFrom(scratch);

  ExternalReference ref =
      ExternalReference(IC_Utility(IC::kStoreIC_ArrayLength), masm->isolate());
  __ TailCallExternalReference(ref, 2, 1);

  __ bind(&miss);

  StubCompiler::TailCallBuiltin(
      masm, BaseLoadStoreStubCompiler::MissBuiltin(kind()));
}
void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame. We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context. We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movq(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpq(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rax, rax, rdx);
  __ SmiToInteger32(rax, rax);
  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rax, args.GetArgumentOperand(0));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movq(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpq(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rcx, rcx, rdx);
  __ SmiToInteger32(rcx, rcx);
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}
void ArgumentsAccessStub::GenerateNewNonStrictFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).

  Factory* factory = masm->isolate()->factory();

  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movq(rcx, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                      StandardFrameConstants::kCallerSPOffset));
  __ movq(args.GetArgumentOperand(1), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
  __ cmpq(rbx, rcx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movq(rbx, rcx);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xor_(r8, r8);
  __ testq(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ lea(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ lea(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addq(r8, Immediate(Heap::kArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
  // Get the arguments boilerplate from the current native context into rdi.
  Label has_mapped_parameters, copy;
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testq(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::ARGUMENTS_BOILERPLATE_INDEX;
  __ movq(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
  __ jmp(&copy, Label::kNear);

  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_BOILERPLATE_INDEX;
  __ bind(&has_mapped_parameters);
  __ movq(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
  __ bind(&copy);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (untagged)
  // rdi = address of boilerplate object (tagged)
  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ movq(rdx, FieldOperand(rdi, i));
    __ movq(FieldOperand(rax, i), rdx);
  }

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ movq(rdx, args.GetArgumentOperand(0));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsCalleeIndex * kPointerSize),
          rdx);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: rcx is tagged from here on.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ Integer32ToSmi(rcx, rcx);
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSize));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testq(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kNonStrictArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ lea(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
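  // (Example of the indexing, as a sketch: for a function with 2 parameters,
  //  both mapped, the loop writes context indices MIN_CONTEXT_SLOTS+1 down
  //  to MIN_CONTEXT_SLOTS, one per parameter.)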
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addq(r8, args.GetArgumentOperand(2));
  __ subq(r8, r9);
  __ Move(r11, factory->the_hole_value());
  __ movq(rdx, rdi);
  __ lea(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  // r9 = loop variable (tagged)
  // r8 = mapping index (tagged)
  // r11 = the hole value
  // rdx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
  __ SmiToInteger64(kScratchRegister, r9);
  __ movq(FieldOperand(rdx, kScratchRegister,
                       times_pointer_size,
                       kParameterMapHeaderSize),
          r8);
  __ movq(FieldOperand(rdi, kScratchRegister,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r11);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ SmiTest(r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // rcx = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);

  Label arguments_loop, arguments_test;
  __ movq(r8, rbx);
  __ movq(rdx, args.GetArgumentOperand(1));
  // Untag rcx for the loop below.
  __ SmiToInteger64(rcx, rcx);
  __ lea(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subq(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subq(rdx, Immediate(kPointerSize));
  __ movq(r9, Operand(rdx, 0));
  __ movq(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addq(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpq(r8, rcx);
  __ j(less, &arguments_loop, Label::kNear);

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  // rcx = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
  __ movq(args.GetArgumentOperand(2), rcx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewNonStrictSlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movq(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                      StandardFrameConstants::kCallerSPOffset));
  __ movq(args.GetArgumentOperand(1), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewArgumentsFast, 3, 1);
}
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movq(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movq(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(2));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movq(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movq(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ lea(rdx, Operand(rdx, rcx, times_pointer_size,
                      StandardFrameConstants::kCallerSPOffset));
  __ movq(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testq(rcx, rcx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ lea(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addq(rcx, Immediate(Heap::kArgumentsObjectSizeStrict));

  // Do the allocation of both objects in one go.
  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments boilerplate from the current native context.
  __ movq(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movq(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset =
      Context::SlotOffset(Context::STRICT_MODE_ARGUMENTS_BOILERPLATE_INDEX);
  __ movq(rdi, Operand(rdi, offset));

  // Copy the JS object part.
  for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
    __ movq(rbx, FieldOperand(rdi, i));
    __ movq(FieldOperand(rax, i), rbx);
  }

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movq(rcx, args.GetArgumentOperand(2));
  __ movq(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ testq(rcx, rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ movq(rdx, args.GetArgumentOperand(1));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ lea(rdi, Operand(rax, Heap::kArgumentsObjectSizeStrict));
  __ movq(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movq(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);

  __ movq(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  // Untag the length for the loop below.
  __ SmiToInteger64(rcx, rcx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movq(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movq(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
  __ addq(rdi, Immediate(kPointerSize));
  __ subq(rdx, Immediate(kPointerSize));
  __ decq(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArgumentsFast, 3, 1);
}
1673 void RegExpExecStub::Generate(MacroAssembler* masm) {
1674 // Just jump directly to runtime if native RegExp is not selected at compile
1675 // time, or if the regexp entry in generated code is turned off by a
1676 // runtime switch.
1677 #ifdef V8_INTERPRETED_REGEXP
1678 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
1679 #else // V8_INTERPRETED_REGEXP
1681 // Stack frame on entry.
1682 // rsp[0] : return address
1683 // rsp[8] : last_match_info (expected JSArray)
1684 // rsp[16] : previous index
1685 // rsp[24] : subject string
1686 // rsp[32] : JSRegExp object
1688 enum RegExpExecStubArgumentIndices {
1689 JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
1690 SUBJECT_STRING_ARGUMENT_INDEX,
1691 PREVIOUS_INDEX_ARGUMENT_INDEX,
1692 LAST_MATCH_INFO_ARGUMENT_INDEX,
1693 REG_EXP_EXEC_ARGUMENT_COUNT
1694 };
1696 StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
1697 ARGUMENTS_DONT_CONTAIN_RECEIVER);
1698 Label runtime;
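// Illustrative mapping, following from the rsp[] layout above: the JSRegExp
// object was pushed first and is the deepest slot, so with four arguments
// GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX) resolves to rsp[32]
// and GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX) to rsp[8].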
1699 // Ensure that a RegExp stack is allocated.
1700 Isolate* isolate = masm->isolate();
1701 ExternalReference address_of_regexp_stack_memory_address =
1702 ExternalReference::address_of_regexp_stack_memory_address(isolate);
1703 ExternalReference address_of_regexp_stack_memory_size =
1704 ExternalReference::address_of_regexp_stack_memory_size(isolate);
1705 __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
1706 __ testq(kScratchRegister, kScratchRegister);
1707 __ j(zero, &runtime);
1709 // Check that the first argument is a JSRegExp object.
1710 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
1711 __ JumpIfSmi(rax, &runtime);
1712 __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
1713 __ j(not_equal, &runtime);
1715 // Check that the RegExp has been compiled (data contains a fixed array).
1716 __ movq(rax, FieldOperand(rax, JSRegExp::kDataOffset));
1717 if (FLAG_debug_code) {
1718 Condition is_smi = masm->CheckSmi(rax);
1719 __ Check(NegateCondition(is_smi),
1720 kUnexpectedTypeForRegExpDataFixedArrayExpected);
1721 __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
1722 __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
1723 }
1725 // rax: RegExp data (FixedArray)
1726 // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
1727 __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
1728 __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
1729 __ j(not_equal, &runtime);
1731 // rax: RegExp data (FixedArray)
1732 // Check that the number of captures fit in the static offsets vector buffer.
1733 __ SmiToInteger32(rdx,
1734 FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
1735 // Check (number_of_captures + 1) * 2 <= offsets vector size
1736 // Or number_of_captures <= offsets vector size / 2 - 1
1737 STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
1738 __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
1739 __ j(above, &runtime);
1741 // Reset offset for possibly sliced string.
1742 __ Set(r14, 0);
1743 __ movq(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
1744 __ JumpIfSmi(rdi, &runtime);
1745 __ movq(r15, rdi); // Make a copy of the original subject string.
1746 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
1747 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1748 // rax: RegExp data (FixedArray)
1749 // rdi: subject string
1750 // r15: subject string
1751 // Handle subject string according to its encoding and representation:
1752 // (1) Sequential two byte? If yes, go to (9).
1753 // (2) Sequential one byte? If yes, go to (6).
1754 // (3) Anything but sequential or cons? If yes, go to (7).
1755 // (4) Cons string. If the string is flat, replace subject with first string.
1756 // Otherwise bailout.
1757 // (5a) Is subject sequential two byte? If yes, go to (9).
1758 // (5b) Is subject external? If yes, go to (8).
1759 // (6) One byte sequential. Load regexp code for one byte.
1760 // (E) Carry on.
1763 // Deferred code at the end of the stub:
1764 // (7) Not a long external string? If yes, go to (10).
1765 // (8) External string. Make it, offset-wise, look like a sequential string.
1766 // (8a) Is the external string one byte? If yes, go to (6).
1767 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
1768 // (10) Short external string or not a string? If yes, bail out to runtime.
1769 // (11) Sliced string. Replace subject with parent. Go to (5a).
1771 Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
1772 external_string /* 8 */, check_underlying /* 5a */,
1773 not_seq_nor_cons /* 7 */, check_code /* E */,
1774 not_long_external /* 10 */;
1776 // (1) Sequential two byte? If yes, go to (9).
1777 __ andb(rbx, Immediate(kIsNotStringMask |
1778 kStringRepresentationMask |
1779 kStringEncodingMask |
1780 kShortExternalStringMask));
1781 STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
1782 __ j(zero, &seq_two_byte_string); // Go to (9).
1784 // (2) Sequential one byte? If yes, go to (6).
1785 // Any other sequential string must be one byte.
1786 __ andb(rbx, Immediate(kIsNotStringMask |
1787 kStringRepresentationMask |
1788 kShortExternalStringMask));
1789 __ j(zero, &seq_one_byte_string, Label::kNear); // Go to (6).
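// How the two masked tests decide (t = instance type bits): the first test
// is zero only for a sequential two-byte string, because the string tag,
// sequential tag and two-byte encoding are all represented by zero bits.
// The second test drops kStringEncodingMask from the mask, so it is zero for
// any sequential string; two-byte strings were already dispatched above, so
// only one-byte sequential strings reach the jump to (6).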
1791 // (3) Anything but sequential or cons? If yes, go to (7).
1792 // We check whether the subject string is a cons, since sequential strings
1793 // have already been covered.
1794 STATIC_ASSERT(kConsStringTag < kExternalStringTag);
1795 STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
1796 STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
1797 STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
1798 __ cmpq(rbx, Immediate(kExternalStringTag));
1799 __ j(greater_equal, &not_seq_nor_cons); // Go to (7).
1801 // (4) Cons string. Check that it's flat.
1802 // Replace subject with first string and reload instance type.
1803 __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
1804 Heap::kempty_stringRootIndex);
1805 __ j(not_equal, &runtime);
1806 __ movq(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
1807 __ bind(&check_underlying);
1808 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
1809 __ movq(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
1811 // (5a) Is subject sequential two byte? If yes, go to (9).
1812 __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
1813 STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
1814 __ j(zero, &seq_two_byte_string); // Go to (9).
1815 // (5b) Is subject external? If yes, go to (8).
1816 __ testb(rbx, Immediate(kStringRepresentationMask));
1817 // The underlying external string is never a short external string.
1818 STATIC_CHECK(ExternalString::kMaxShortLength < ConsString::kMinLength);
1819 STATIC_CHECK(ExternalString::kMaxShortLength < SlicedString::kMinLength);
1820 __ j(not_zero, &external_string); // Go to (8)
1822 // (6) One byte sequential. Load regexp code for one byte.
1823 __ bind(&seq_one_byte_string);
1824 // rax: RegExp data (FixedArray)
1825 __ movq(r11, FieldOperand(rax, JSRegExp::kDataAsciiCodeOffset));
1826 __ Set(rcx, 1); // Type is one byte.
1828 // (E) Carry on. String handling is done.
1829 __ bind(&check_code);
1830 // r11: irregexp code
1831 // Check that the irregexp code has been generated for the actual string
1832 // encoding. If it has, the field contains a code object; otherwise it
1833 // contains a smi (code flushing support).
1834 __ JumpIfSmi(r11, &runtime);
1836 // rdi: sequential subject string (or look-alike, external string)
1837 // r15: original subject string
1838 // rcx: encoding of subject string (1 if ASCII, 0 if two_byte);
1840 // Load used arguments before starting to push arguments for call to native
1841 // RegExp code to avoid handling changing stack height.
1842 // We have to use r15 instead of rdi to load the length because rdi might
1843 // have been only made to look like a sequential string when it actually
1844 // is an external string.
1845 __ movq(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
1846 __ JumpIfNotSmi(rbx, &runtime);
1847 __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
1848 __ j(above_equal, &runtime);
1849 __ SmiToInteger64(rbx, rbx);
1851 // rdi: subject string
1852 // rbx: previous index
1853 // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
1855 // All checks done. Now push arguments for native regexp code.
1856 Counters* counters = masm->isolate()->counters();
1857 __ IncrementCounter(counters->regexp_entry_native(), 1);
1859 // Isolates: note we add an additional parameter here (isolate pointer).
1860 static const int kRegExpExecuteArguments = 9;
1861 int argument_slots_on_stack =
1862 masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
1863 __ EnterApiExitFrame(argument_slots_on_stack);
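// Rough ABI note (details vary by target and are only illustrative): of the
// 9 arguments, typical AMD64 Unix targets pass the first six in registers
// and the rest on the stack, while Win64 passes four in registers but
// reserves stack slots for all nine (including the register "home" slots);
// ArgumentStackSlotsForCFunctionCall() encapsulates that difference.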
1865 // Argument 9: Pass current isolate address.
1866 __ LoadAddress(kScratchRegister,
1867 ExternalReference::isolate_address(masm->isolate()));
1868 __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kPointerSize),
1869 kScratchRegister);
1871 // Argument 8: Indicate that this is a direct call from JavaScript.
1872 __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kPointerSize),
1873 Immediate(1));
1875 // Argument 7: Start (high end) of backtracking stack memory area.
1876 __ movq(kScratchRegister, address_of_regexp_stack_memory_address);
1877 __ movq(r9, Operand(kScratchRegister, 0));
1878 __ movq(kScratchRegister, address_of_regexp_stack_memory_size);
1879 __ addq(r9, Operand(kScratchRegister, 0));
1880 __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kPointerSize), r9);
1882 // Argument 6: Set the number of capture registers to zero to force global
1883 // regexps to behave as non-global. This does not affect non-global regexps.
1884 // Argument 6 is passed in r9 on Linux and on the stack on Windows.
1885 #ifdef _WIN64
1886 __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kPointerSize),
1887 Immediate(0));
1888 #else
1889 __ Set(r9, 0);
1890 #endif
1892 // Argument 5: static offsets vector buffer.
1893 __ LoadAddress(r8,
1894 ExternalReference::address_of_static_offsets_vector(isolate));
1895 // Argument 5 passed in r8 on Linux and on the stack on Windows.
1896 #ifdef _WIN64
1897 __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kPointerSize), r8);
1898 #endif
1900 // rdi: subject string
1901 // rbx: previous index
1902 // rcx: encoding of subject string (1 if ASCII 0 if two_byte);
1904 // r14: slice offset
1905 // r15: original subject string
1907 // Argument 2: Previous index.
1908 __ movq(arg_reg_2, rbx);
1910 // Argument 4: End of string data
1911 // Argument 3: Start of string data
1912 Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
1913 // Prepare start and end index of the input.
1914 // Load the length from the original sliced string if that is the case.
1915 __ addq(rbx, r14);
1916 __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
1917 __ addq(r14, arg_reg_3); // Using arg3 as scratch.
1919 // rbx: start index of the input
1920 // r14: end index of the input
1921 // r15: original subject string
1922 __ testb(rcx, rcx); // Last use of rcx as encoding of subject string.
1923 __ j(zero, &setup_two_byte, Label::kNear);
1924 __ lea(arg_reg_4,
1925 FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
1926 __ lea(arg_reg_3,
1927 FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
1928 __ jmp(&setup_rest, Label::kNear);
1929 __ bind(&setup_two_byte);
1930 __ lea(arg_reg_4,
1931 FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
1932 __ lea(arg_reg_3,
1933 FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
1934 __ bind(&setup_rest);
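// Illustrative address arithmetic for the one-byte case above (FieldOperand
// subtracts kHeapObjectTag): with rbx = start index and r14 = end index,
//   arg_reg_3 = rdi + SeqOneByteString::kHeaderSize - kHeapObjectTag + rbx
//   arg_reg_4 = rdi + SeqOneByteString::kHeaderSize - kHeapObjectTag + r14
// i.e. raw pointers to the first and one-past-last character of the input.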
1936 // Argument 1: Original subject string.
1937 // The original subject is in the previous stack frame. Therefore we have to
1938 // use rbp, which points exactly to one pointer size below the previous rsp.
1939 // (Because creating a new stack frame pushes the previous rbp onto the stack
1940 // and thereby moves up rsp by one kPointerSize.)
1941 __ movq(arg_reg_1, r15);
1943 // Locate the code entry and call it.
1944 __ addq(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
1945 __ call(r11);
1947 __ LeaveApiExitFrame(true);
1949 // Check the result.
1950 Label success;
1951 Label exception;
1952 __ cmpl(rax, Immediate(1));
1953 // We expect exactly one result since we force the called regexp to behave
1954 // as non-global.
1955 __ j(equal, &success, Label::kNear);
1956 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
1957 __ j(equal, &exception);
1958 __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
1959 // If none of the above, it can only be retry.
1960 // Handle that in the runtime system.
1961 __ j(not_equal, &runtime);
1963 // For failure return null.
1964 __ LoadRoot(rax, Heap::kNullValueRootIndex);
1965 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
1967 // Load RegExp data.
1968 __ bind(&success);
1969 __ movq(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
1970 __ movq(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
1971 __ SmiToInteger32(rax,
1972 FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
1973 // Calculate number of capture registers (number_of_captures + 1) * 2.
1974 __ leal(rdx, Operand(rax, rax, times_1, 2));
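// The lea computes rdx = rax + rax + 2 = (number_of_captures + 1) * 2 in a
// single instruction without touching the flags; e.g. 3 capture groups need
// (3 + 1) * 2 = 8 capture registers.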
1976 // rdx: Number of capture registers
1977 // Check that the fourth object is a JSArray object.
1978 __ movq(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
1979 __ JumpIfSmi(r15, &runtime);
1980 __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
1981 __ j(not_equal, &runtime);
1982 // Check that the JSArray is in fast case.
1983 __ movq(rbx, FieldOperand(r15, JSArray::kElementsOffset));
1984 __ movq(rax, FieldOperand(rbx, HeapObject::kMapOffset));
1985 __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
1986 __ j(not_equal, &runtime);
1987 // Check that the last match info has space for the capture registers and the
1988 // additional information. Ensure no overflow in add.
1989 STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
1990 __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
1991 __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
1992 __ cmpl(rdx, rax);
1993 __ j(greater, &runtime);
1995 // rbx: last_match_info backing store (FixedArray)
1996 // rdx: number of capture registers
1997 // Store the capture count.
1998 __ Integer32ToSmi(kScratchRegister, rdx);
1999 __ movq(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
2000 kScratchRegister);
2001 // Store last subject and last input.
2002 __ movq(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
2003 __ movq(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
2004 __ movq(rcx, rax);
2005 __ RecordWriteField(rbx,
2006 RegExpImpl::kLastSubjectOffset,
2007 rax,
2008 rdi,
2009 kDontSaveFPRegs);
2010 __ movq(rax, rcx);
2011 __ movq(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
2012 __ RecordWriteField(rbx,
2013 RegExpImpl::kLastInputOffset,
2014 rax,
2015 rdi,
2016 kDontSaveFPRegs);
2018 // Get the static offsets vector filled by the native regexp code.
2019 __ LoadAddress(rcx,
2020 ExternalReference::address_of_static_offsets_vector(isolate));
2022 // rbx: last_match_info backing store (FixedArray)
2023 // rcx: offsets vector
2024 // rdx: number of capture registers
2025 Label next_capture, done;
2026 // Capture register counter starts from number of capture registers and
2027 // counts down until it goes below zero.
2028 __ bind(&next_capture);
2029 __ subq(rdx, Immediate(1));
2030 __ j(negative, &done, Label::kNear);
2031 // Read the value from the static offsets vector buffer and make it a smi.
2032 __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
2033 __ Integer32ToSmi(rdi, rdi);
2034 // Store the smi value in the last match info.
2035 __ movq(FieldOperand(rbx,
2036 rdx,
2037 times_pointer_size,
2038 RegExpImpl::kFirstCaptureOffset),
2039 rdi);
2040 __ jmp(&next_capture);
2042 __ bind(&done);
2043 // Return last match info.
2044 __ movq(rax, r15);
2045 __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);
2047 __ bind(&exception);
2048 // Result must now be exception. If there is no pending exception already, a
2049 // stack overflow (on the backtrack stack) was detected in RegExp code, but
2050 // the exception has not yet been created. Handle that in the runtime system.
2051 // TODO(592): Rerunning the RegExp to get the stack overflow exception.
2052 ExternalReference pending_exception_address(
2053 Isolate::kPendingExceptionAddress, isolate);
2054 Operand pending_exception_operand =
2055 masm->ExternalOperand(pending_exception_address, rbx);
2056 __ movq(rax, pending_exception_operand);
2057 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2058 __ cmpq(rax, rdx);
2059 __ j(equal, &runtime);
2060 __ movq(pending_exception_operand, rdx);
2062 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2063 Label termination_exception;
2064 __ j(equal, &termination_exception, Label::kNear);
2065 __ Throw(rax);
2067 __ bind(&termination_exception);
2068 __ ThrowUncatchable(rax);
2070 // Do the runtime call to execute the regexp.
2071 __ bind(&runtime);
2072 __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
2074 // Deferred code for string handling.
2075 // (7) Not a long external string? If yes, go to (10).
2076 __ bind(&not_seq_nor_cons);
2077 // Compare flags are still set from (3).
2078 __ j(greater, &not_long_external, Label::kNear); // Go to (10).
2080 // (8) External string. Short external strings have been ruled out.
2081 __ bind(&external_string);
2082 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
2083 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
2084 if (FLAG_debug_code) {
2085 // Assert that we do not have a cons or slice (indirect strings) here.
2086 // Sequential strings have already been ruled out.
2087 __ testb(rbx, Immediate(kIsIndirectStringMask));
2088 __ Assert(zero, kExternalStringExpectedButNotFound);
2089 }
2090 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
2091 // Move the pointer so that offset-wise, it looks like a sequential string.
2092 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
2093 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
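// Effect of the subq above, for illustration: rdi now points one header
// (minus the heap-object tag) before the raw resource data, so
//   FieldOperand(rdi, index, times_1, SeqOneByteString::kHeaderSize)
// resolves to resource_data + index, exactly as for a sequential string.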
2094 STATIC_ASSERT(kTwoByteStringTag == 0);
2095 // (8a) Is the external string one byte? If yes, go to (6).
2096 __ testb(rbx, Immediate(kStringEncodingMask));
2097 __ j(not_zero, &seq_one_byte_string); // Go to (6).
2099 // rdi: subject string (flat two-byte)
2100 // rax: RegExp data (FixedArray)
2101 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
2102 __ bind(&seq_two_byte_string);
2103 __ movq(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
2104 __ Set(rcx, 0); // Type is two byte.
2105 __ jmp(&check_code); // Go to (E).
2107 // (10) Not a string or a short external string? If yes, bail out to runtime.
2108 __ bind(&not_long_external);
2109 // Catch non-string subject or short external string.
2110 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
2111 __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
2112 __ j(not_zero, &runtime);
2114 // (11) Sliced string. Replace subject with parent. Go to (5a).
2115 // Load offset into r14 and replace subject string with parent.
2116 __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
2117 __ movq(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
2118 __ jmp(&check_underlying);
2119 #endif // V8_INTERPRETED_REGEXP
2120 }
2123 void RegExpConstructResultStub::Generate(MacroAssembler* masm) {
2124 const int kMaxInlineLength = 100;
2125 Label slowcase;
2126 Label done;
2127 StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
2128 __ movq(r8, args.GetArgumentOperand(0));
2129 __ JumpIfNotSmi(r8, &slowcase);
2130 __ SmiToInteger32(rbx, r8);
2131 __ cmpl(rbx, Immediate(kMaxInlineLength));
2132 __ j(above, &slowcase);
2133 // Smi-tagging is equivalent to multiplying by 2.
2134 STATIC_ASSERT(kSmiTag == 0);
2135 STATIC_ASSERT(kSmiTagSize == 1);
2136 // Allocate RegExpResult followed by FixedArray with size in rbx.
2137 // JSArray: [Map][empty properties][Elements][Length-smi][index][input]
2138 // Elements: [Map][Length][..elements..]
2139 __ Allocate(JSRegExpResult::kSize + FixedArray::kHeaderSize,
2140 times_pointer_size,
2141 rbx, // In: Number of elements.
2142 rax, // Out: Start of allocation (tagged).
2143 rcx, // Out: End of allocation.
2144 rdx, // Scratch register
2145 &slowcase,
2146 TAG_OBJECT);
2147 // rax: Start of allocated area, object-tagged.
2148 // rbx: Number of array elements as int32.
2149 // r8: Number of array elements as smi.
2151 // Set JSArray map to global.regexp_result_map().
2152 __ movq(rdx, ContextOperand(rsi, Context::GLOBAL_OBJECT_INDEX));
2153 __ movq(rdx, FieldOperand(rdx, GlobalObject::kNativeContextOffset));
2154 __ movq(rdx, ContextOperand(rdx, Context::REGEXP_RESULT_MAP_INDEX));
2155 __ movq(FieldOperand(rax, HeapObject::kMapOffset), rdx);
2157 // Set empty properties FixedArray.
2158 __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
2159 __ movq(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
2161 // Set elements to point to FixedArray allocated right after the JSArray.
2162 __ lea(rcx, Operand(rax, JSRegExpResult::kSize));
2163 __ movq(FieldOperand(rax, JSObject::kElementsOffset), rcx);
2165 // Set input, index and length fields from arguments.
2166 __ movq(r8, args.GetArgumentOperand(2));
2167 __ movq(FieldOperand(rax, JSRegExpResult::kInputOffset), r8);
2168 __ movq(r8, args.GetArgumentOperand(1));
2169 __ movq(FieldOperand(rax, JSRegExpResult::kIndexOffset), r8);
2170 __ movq(r8, args.GetArgumentOperand(0));
2171 __ movq(FieldOperand(rax, JSArray::kLengthOffset), r8);
2173 // Fill out the elements FixedArray.
2176 // rbx: Number of elements in array as int32.
2179 __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
2180 __ movq(FieldOperand(rcx, HeapObject::kMapOffset), kScratchRegister);
2182 __ Integer32ToSmi(rdx, rbx);
2183 __ movq(FieldOperand(rcx, FixedArray::kLengthOffset), rdx);
2184 // Fill contents of fixed-array with undefined.
2185 __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
2186 __ lea(rcx, FieldOperand(rcx, FixedArray::kHeaderSize));
2187 // Fill fixed array elements with undefined.
2189 // rbx: Number of elements in array that remains to be filled, as int32.
2190 // rcx: Start of elements in FixedArray.
2191 // rdx: undefined.
2192 Label loop;
2193 __ testl(rbx, rbx);
2194 __ bind(&loop);
2195 __ j(less_equal, &done); // Jump if rbx is negative or zero.
2196 __ subl(rbx, Immediate(1));
2197 __ movq(Operand(rcx, rbx, times_pointer_size, 0), rdx);
2198 __ jmp(&loop);
2200 __ bind(&done);
2201 __ ret(3 * kPointerSize);
2203 __ bind(&slowcase);
2204 __ TailCallRuntime(Runtime::kRegExpConstructResult, 3, 1);
2205 }
2208 static int NegativeComparisonResult(Condition cc) {
2209 ASSERT(cc != equal);
2210 ASSERT((cc == less) || (cc == less_equal)
2211 || (cc == greater) || (cc == greater_equal));
2212 return (cc == greater || cc == greater_equal) ? LESS : GREATER;
2213 }
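// Example: for cc == less the function returns GREATER, so when a comparand
// forces failure (undefined or NaN below) the materialized result makes the
// less-than test come out false, which is the required semantics.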
2216 static void CheckInputType(MacroAssembler* masm,
2217 Register input,
2218 CompareIC::State expected,
2219 Label* fail) {
2220 Label ok;
2221 if (expected == CompareIC::SMI) {
2222 __ JumpIfNotSmi(input, fail);
2223 } else if (expected == CompareIC::NUMBER) {
2224 __ JumpIfSmi(input, &ok);
2225 __ CompareMap(input, masm->isolate()->factory()->heap_number_map(), NULL);
2226 __ j(not_equal, fail);
2227 }
2228 // We could be strict about internalized/non-internalized here, but as long as
2229 // hydrogen doesn't care, the stub doesn't have to care either.
2230 __ bind(&ok);
2231 }
2234 static void BranchIfNotInternalizedString(MacroAssembler* masm,
2235 Label* label,
2236 Register object,
2237 Register scratch) {
2238 __ JumpIfSmi(object, label);
2239 __ movq(scratch, FieldOperand(object, HeapObject::kMapOffset));
2240 __ movzxbl(scratch,
2241 FieldOperand(scratch, Map::kInstanceTypeOffset));
2242 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2243 __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2244 __ j(not_zero, label);
2245 }
2248 void ICCompareStub::GenerateGeneric(MacroAssembler* masm) {
2249 Label check_unequal_objects, done;
2250 Condition cc = GetCondition();
2251 Factory* factory = masm->isolate()->factory();
2253 Label miss;
2254 CheckInputType(masm, rdx, left_, &miss);
2255 CheckInputType(masm, rax, right_, &miss);
2257 // Compare two smis.
2258 Label non_smi, smi_done;
2259 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
2260 __ subq(rdx, rax);
2261 __ j(no_overflow, &smi_done);
2262 __ not_(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
2263 __ bind(&smi_done);
2264 __ movq(rax, rdx);
2265 __ ret(0);
2266 __ bind(&non_smi);
2268 // The compare stub returns a positive, negative, or zero 64-bit integer
2269 // value in rax, corresponding to result of comparing the two inputs.
2270 // NOTICE! This code is only reached after a smi-fast-case check, so
2271 // it is certain that at least one operand isn't a smi.
2273 // Two identical objects are equal unless they are both NaN or undefined.
2274 {
2275 Label not_identical;
2276 __ cmpq(rax, rdx);
2277 __ j(not_equal, &not_identical, Label::kNear);
2279 if (cc != equal) {
2280 // Check for undefined. undefined OP undefined is false even though
2281 // undefined == undefined.
2282 Label check_for_nan;
2283 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
2284 __ j(not_equal, &check_for_nan, Label::kNear);
2285 __ Set(rax, NegativeComparisonResult(cc));
2286 __ ret(0);
2287 __ bind(&check_for_nan);
2288 }
2290 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
2291 // so we do the second best thing - test it ourselves.
2292 Label heap_number;
2293 // If it's not a heap number, then return equal for (in)equality operator.
2294 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
2295 factory->heap_number_map());
2296 __ j(equal, &heap_number, Label::kNear);
2298 // Call runtime on identical objects. Otherwise return equal.
2299 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2300 __ j(above_equal, &not_identical, Label::kNear);
2301 __ Set(rax, EQUAL);
2302 __ ret(0);
2305 __ bind(&heap_number);
2306 // It is a heap number, so return equal if it's not NaN.
2307 // For NaN, return 1 for every condition except greater and
2308 // greater-equal. Return -1 for them, so the comparison yields
2309 // false for all conditions except not-equal.
2310 __ Set(rax, EQUAL);
2311 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
2312 __ ucomisd(xmm0, xmm0);
2313 __ setcc(parity_even, rax);
2314 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
2315 if (cc == greater_equal || cc == greater) {
2316 __ neg(rax);
2317 }
2318 __ ret(0);
2320 __ bind(&not_identical);
2321 }
2323 if (cc == equal) { // Both strict and non-strict.
2324 Label slow; // Fallthrough label.
2326 // If we're doing a strict equality comparison, we don't have to do
2327 // type conversion, so we generate code to do fast comparison for objects
2328 // and oddballs. Non-smi numbers and strings still go through the usual
2329 // slow-path code.
2330 if (strict()) {
2331 // If either is a Smi (we know that not both are), then they can only
2332 // be equal if the other is a HeapNumber. If so, use the slow case.
2333 {
2334 Label not_smis;
2335 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
2337 // Check if the non-smi operand is a heap number.
2338 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
2339 factory->heap_number_map());
2340 // If heap number, handle it in the slow case.
2341 __ j(equal, &slow);
2342 // Return non-equal. ebx (the lower half of rbx) is not zero.
2343 __ movq(rax, rbx);
2344 __ ret(0);
2346 __ bind(&not_smis);
2347 }
2349 // If either operand is a JSObject or an oddball value, then they are not
2350 // equal since their pointers are different
2351 // There is no test for undetectability in strict equality.
2353 // If the first object is a JS object, we have done pointer comparison.
2354 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
2355 Label first_non_object;
2356 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2357 __ j(below, &first_non_object, Label::kNear);
2358 // Return non-zero (rax, the tagged object pointer, is not zero).
2359 Label return_not_equal;
2360 STATIC_ASSERT(kHeapObjectTag != 0);
2361 __ bind(&return_not_equal);
2362 __ ret(0);
2364 __ bind(&first_non_object);
2365 // Check for oddballs: true, false, null, undefined.
2366 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2367 __ j(equal, &return_not_equal);
2369 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
2370 __ j(above_equal, &return_not_equal);
2372 // Check for oddballs: true, false, null, undefined.
2373 __ CmpInstanceType(rcx, ODDBALL_TYPE);
2374 __ j(equal, &return_not_equal);
2376 // Fall through to the general case.
2377 }
2378 __ bind(&slow);
2379 }
2381 // Generate the number comparison code.
2382 Label non_number_comparison;
2383 Label unordered;
2384 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
2385 __ xorl(rax, rax);
2386 __ xorl(rcx, rcx);
2387 __ ucomisd(xmm0, xmm1);
2389 // Don't base result on EFLAGS when a NaN is involved.
2390 __ j(parity_even, &unordered, Label::kNear);
2391 // Return a result of -1, 0, or 1, based on EFLAGS.
2392 __ setcc(above, rax);
2393 __ setcc(below, rcx);
2394 __ subq(rax, rcx);
2395 __ ret(0);
2397 // If one of the numbers was NaN, then the result is always false.
2398 // The cc is never not-equal.
2399 __ bind(&unordered);
2400 ASSERT(cc != not_equal);
2401 if (cc == less || cc == less_equal) {
2402 __ Set(rax, 1);
2403 } else {
2404 __ Set(rax, -1);
2405 }
2406 __ ret(0);
2408 // The number comparison code did not provide a valid result.
2409 __ bind(&non_number_comparison);
2411 // Fast negative check for internalized-to-internalized equality.
2412 Label check_for_strings;
2414 BranchIfNotInternalizedString(
2415 masm, &check_for_strings, rax, kScratchRegister);
2416 BranchIfNotInternalizedString(
2417 masm, &check_for_strings, rdx, kScratchRegister);
2419 // We've already checked for object identity, so if both operands are
2420 // internalized strings they aren't equal. Register rax already
2421 // holds a non-zero value, which indicates not equal, so just return.
2422 __ ret(0);
2425 __ bind(&check_for_strings);
2427 __ JumpIfNotBothSequentialAsciiStrings(
2428 rdx, rax, rcx, rbx, &check_unequal_objects);
2430 // Inline comparison of ASCII strings.
2431 if (cc == equal) {
2432 StringCompareStub::GenerateFlatAsciiStringEquals(masm,
2433 rdx,
2434 rax,
2435 rcx,
2436 rbx);
2437 } else {
2438 StringCompareStub::GenerateCompareFlatAsciiStrings(masm,
2439 rdx,
2440 rax,
2441 rcx,
2442 rbx,
2443 rdi,
2444 r8);
2445 }
2447 #ifdef DEBUG
2448 __ Abort(kUnexpectedFallThroughFromStringComparison);
2449 #endif
2451 __ bind(&check_unequal_objects);
2452 if (cc == equal && !strict()) {
2453 // Not strict equality. Objects are unequal if
2454 // they are both JSObjects and not undetectable,
2455 // and their pointers are different.
2456 Label not_both_objects, return_unequal;
2457 // At most one is a smi, so we can test for smi by adding the two.
2458 // A smi plus a heap object has the low bit set, a heap object plus
2459 // a heap object has the low bit clear.
2460 STATIC_ASSERT(kSmiTag == 0);
2461 STATIC_ASSERT(kSmiTagMask == 1);
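// Worked example of the addition trick (low bit: smi = 0, heap object = 1):
//   smi + heap object:         ...0 + ...1 -> low bit 1, not both objects
//   heap object + heap object: ...1 + ...1 -> low bit 0, both objects
// At most one operand can be a smi here, so smi + smi cannot occur.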
2462 __ lea(rcx, Operand(rax, rdx, times_1, 0));
2463 __ testb(rcx, Immediate(kSmiTagMask));
2464 __ j(not_zero, &not_both_objects, Label::kNear);
2465 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
2466 __ j(below, &not_both_objects, Label::kNear);
2467 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
2468 __ j(below, &not_both_objects, Label::kNear);
2469 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
2470 Immediate(1 << Map::kIsUndetectable));
2471 __ j(zero, &return_unequal, Label::kNear);
2472 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
2473 Immediate(1 << Map::kIsUndetectable));
2474 __ j(zero, &return_unequal, Label::kNear);
2475 // The objects are both undetectable, so they both compare as the value
2476 // undefined, and are equal.
2477 __ Set(rax, EQUAL);
2478 __ bind(&return_unequal);
2479 // Return non-equal by returning the non-zero object pointer in rax,
2480 // or return equal if we fell through to here.
2481 __ ret(0);
2482 __ bind(&not_both_objects);
2483 }
2485 // Push arguments below the return address to prepare jump to builtin.
2486 __ PopReturnAddressTo(rcx);
2487 __ push(rdx);
2488 __ push(rax);
2490 // Figure out which native to call and setup the arguments.
2491 Builtins::JavaScript builtin;
2492 if (cc == equal) {
2493 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
2494 } else {
2495 builtin = Builtins::COMPARE;
2496 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
2497 }
2499 __ PushReturnAddressFrom(rcx);
2501 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
2502 // tagged as a small integer.
2503 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
2505 __ bind(&miss);
2506 GenerateMiss(masm);
2507 }
2510 static void GenerateRecordCallTarget(MacroAssembler* masm) {
2511 // Cache the called function in a global property cell. Cache states
2512 // are uninitialized, monomorphic (indicated by a JSFunction), and
2513 // megamorphic.
2514 // rax : number of arguments to the construct function
2515 // rbx : cache cell for call target
2516 // rdi : the function to call
2517 Isolate* isolate = masm->isolate();
2518 Label initialize, done, miss, megamorphic, not_array_function;
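// Illustrative cache-state transitions handled below: the cell starts at
// UninitializedSentinel, is patched on the first call with the JSFunction
// (or with an AllocationSite when the callee is the Array function), and
// any later mismatch degrades it permanently to MegamorphicSentinel.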
2520 // Load the cache state into rcx.
2521 __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
2523 // A monomorphic cache hit or an already megamorphic state: invoke the
2524 // function without changing the state.
2525 __ cmpq(rcx, rdi);
2526 __ j(equal, &done);
2527 __ Cmp(rcx, TypeFeedbackCells::MegamorphicSentinel(isolate));
2528 __ j(equal, &done);
2530 // If we came here, we need to see if we are the array function.
2531 // If we didn't have a matching function, and we didn't find the megamorph
2532 // sentinel, then we have in the cell either some other function or an
2533 // AllocationSite. Do a map check on the object in rcx.
2534 Handle<Map> allocation_site_map =
2535 masm->isolate()->factory()->allocation_site_map();
2536 __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
2537 __ j(not_equal, &miss);
2539 // Make sure the function is the Array() function
2540 __ LoadArrayFunction(rcx);
2541 __ cmpq(rdi, rcx);
2542 __ j(not_equal, &megamorphic);
2543 __ jmp(&done);
2545 __ bind(&miss);
2547 // A monomorphic miss (i.e, here the cache is not uninitialized) goes
2548 // megamorphic.
2549 __ Cmp(rcx, TypeFeedbackCells::UninitializedSentinel(isolate));
2550 __ j(equal, &initialize);
2551 // MegamorphicSentinel is an immortal immovable object (undefined) so no
2552 // write-barrier is needed.
2553 __ bind(&megamorphic);
2554 __ Move(FieldOperand(rbx, Cell::kValueOffset),
2555 TypeFeedbackCells::MegamorphicSentinel(isolate));
2556 __ jmp(&done);
2558 // An uninitialized cache is patched with the function or sentinel to
2559 // indicate the ElementsKind if function is the Array constructor.
2560 __ bind(&initialize);
2561 // Make sure the function is the Array() function
2562 __ LoadArrayFunction(rcx);
2563 __ cmpq(rdi, rcx);
2564 __ j(not_equal, &not_array_function);
2566 // The target function is the Array constructor,
2567 // Create an AllocationSite if we don't already have it, store it in the cell
2568 {
2569 FrameScope scope(masm, StackFrame::INTERNAL);
2571 // Arguments register must be smi-tagged to call out.
2572 __ Integer32ToSmi(rax, rax);
2573 __ push(rax);
2574 __ push(rdi);
2575 __ push(rbx);
2577 CreateAllocationSiteStub create_stub;
2578 __ CallStub(&create_stub);
2579 __ pop(rbx);
2580 __ pop(rdi);
2581 __ pop(rax);
2583 __ SmiToInteger32(rax, rax);
2584 }
2586 __ jmp(&done);
2587 __ bind(¬_array_function);
2588 __ movq(FieldOperand(rbx, Cell::kValueOffset), rdi);
2589 // No need for a write barrier here - cells are rescanned.
2591 __ bind(&done);
2592 }
2595 void CallFunctionStub::Generate(MacroAssembler* masm) {
2596 // rbx : cache cell for call target
2597 // rdi : the function to call
2598 Isolate* isolate = masm->isolate();
2599 Label slow, non_function;
2600 StackArgumentsAccessor args(rsp, argc_);
2602 // The receiver might implicitly be the global object. This is
2603 // indicated by passing the hole as the receiver to the call
2604 // function instead of the actual receiver.
2605 if (ReceiverMightBeImplicit()) {
2606 Label call;
2607 // Get the receiver from the stack.
2608 __ movq(rax, args.GetReceiverOperand());
2609 // Call as function is indicated with the hole.
2610 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2611 __ j(not_equal, &call, Label::kNear);
2612 // Patch the receiver on the stack with the global receiver object.
2613 __ movq(rcx, GlobalObjectOperand());
2614 __ movq(rcx, FieldOperand(rcx, GlobalObject::kGlobalReceiverOffset));
2615 __ movq(args.GetReceiverOperand(), rcx);
2616 __ bind(&call);
2617 }
2619 // Check that the function really is a JavaScript function.
2620 __ JumpIfSmi(rdi, &non_function);
2621 // Goto slow case if we do not have a function.
2622 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2623 __ j(not_equal, &slow);
2625 if (RecordCallTarget()) {
2626 GenerateRecordCallTarget(masm);
2627 }
2629 // Fast-case: Just invoke the function.
2630 ParameterCount actual(argc_);
2632 if (ReceiverMightBeImplicit()) {
2633 Label call_as_function;
2634 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2635 __ j(equal, &call_as_function);
2636 __ InvokeFunction(rdi,
2637 actual,
2638 JUMP_FUNCTION,
2639 NullCallWrapper(),
2640 CALL_AS_METHOD);
2641 __ bind(&call_as_function);
2642 }
2643 __ InvokeFunction(rdi,
2644 actual,
2645 JUMP_FUNCTION,
2646 NullCallWrapper(),
2647 CALL_AS_FUNCTION);
2649 // Slow-case: Non-function called.
2650 __ bind(&slow);
2651 if (RecordCallTarget()) {
2652 // If there is a call target cache, mark it megamorphic in the
2653 // non-function case. MegamorphicSentinel is an immortal immovable
2654 // object (undefined) so no write barrier is needed.
2655 __ Move(FieldOperand(rbx, Cell::kValueOffset),
2656 TypeFeedbackCells::MegamorphicSentinel(isolate));
2657 }
2658 // Check for function proxy.
2659 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2660 __ j(not_equal, &non_function);
2661 __ PopReturnAddressTo(rcx);
2662 __ push(rdi); // put proxy as additional argument under return address
2663 __ PushReturnAddressFrom(rcx);
2664 __ Set(rax, argc_ + 1);
2665 __ Set(rbx, 0);
2666 __ SetCallKind(rcx, CALL_AS_METHOD);
2667 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
2668 {
2669 Handle<Code> adaptor =
2670 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
2671 __ jmp(adaptor, RelocInfo::CODE_TARGET);
2672 }
2674 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
2675 // of the original receiver from the call site).
2676 __ bind(&non_function);
2677 __ movq(args.GetReceiverOperand(), rdi);
2678 __ Set(rax, argc_);
2679 __ Set(rbx, 0);
2680 __ SetCallKind(rcx, CALL_AS_METHOD);
2681 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
2682 Handle<Code> adaptor =
2683 isolate->builtins()->ArgumentsAdaptorTrampoline();
2684 __ Jump(adaptor, RelocInfo::CODE_TARGET);
2685 }
2688 void CallConstructStub::Generate(MacroAssembler* masm) {
2689 // rax : number of arguments
2690 // rbx : cache cell for call target
2691 // rdi : constructor function
2692 Label slow, non_function_call;
2694 // Check that function is not a smi.
2695 __ JumpIfSmi(rdi, &non_function_call);
2696 // Check that function is a JSFunction.
2697 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2698 __ j(not_equal, &slow);
2700 if (RecordCallTarget()) {
2701 GenerateRecordCallTarget(masm);
2702 }
2704 // Jump to the function-specific construct stub.
2705 Register jmp_reg = rcx;
2706 __ movq(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2707 __ movq(jmp_reg, FieldOperand(jmp_reg,
2708 SharedFunctionInfo::kConstructStubOffset));
2709 __ lea(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2710 __ jmp(jmp_reg);
2712 // rdi: called object
2713 // rax: number of arguments
2714 // rcx: object map
2715 Label do_call;
2716 __ bind(&slow);
2717 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2718 __ j(not_equal, &non_function_call);
2719 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2720 __ jmp(&do_call);
2722 __ bind(&non_function_call);
2723 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2724 __ bind(&do_call);
2725 // Set expected number of arguments to zero (not changing rax).
2726 __ Set(rbx, 0);
2727 __ SetCallKind(rcx, CALL_AS_METHOD);
2728 __ Jump(masm->isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2729 RelocInfo::CODE_TARGET);
2730 }
2733 bool CEntryStub::NeedsImmovableCode() {
2734 return false;
2735 }
2738 bool CEntryStub::IsPregenerated(Isolate* isolate) {
2739 #ifdef _WIN64
2740 return result_size_ == 1;
2741 #else
2742 return true;
2743 #endif
2744 }
2747 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2748 CEntryStub::GenerateAheadOfTime(isolate);
2749 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2750 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2751 // It is important that the store buffer overflow stubs are generated first.
2752 RecordWriteStub::GenerateFixedRegStubsAheadOfTime(isolate);
2753 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2754 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2755 BinaryOpStub::GenerateAheadOfTime(isolate);
2756 }
2759 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2760 }
2763 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2764 CEntryStub stub(1, kDontSaveFPRegs);
2765 stub.GetCode(isolate)->set_is_pregenerated(true);
2766 CEntryStub save_doubles(1, kSaveFPRegs);
2767 save_doubles.GetCode(isolate)->set_is_pregenerated(true);
2768 }
2771 static void JumpIfOOM(MacroAssembler* masm,
2772 Register value,
2773 Register scratch,
2774 Label* oom_label) {
2775 __ movq(scratch, value);
2776 STATIC_ASSERT(Failure::OUT_OF_MEMORY_EXCEPTION == 3);
2777 STATIC_ASSERT(kFailureTag == 3);
2778 __ and_(scratch, Immediate(0xf));
2779 __ cmpq(scratch, Immediate(0xf));
2780 __ j(equal, oom_label);
2781 }
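// Bit sketch behind the 0xf test (follows from the STATIC_ASSERTs above):
// a failure word carries kFailureTag (0b11) in its low two bits and the
// failure type in the next two, so OUT_OF_MEMORY_EXCEPTION (3) gives
//   value & 0xf == 0b1111 == 0xf
// and no other type/tag combination matches.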
2784 void CEntryStub::GenerateCore(MacroAssembler* masm,
2785 Label* throw_normal_exception,
2786 Label* throw_termination_exception,
2787 Label* throw_out_of_memory_exception,
2788 bool do_gc,
2789 bool always_allocate_scope) {
2790 // rax: result parameter for PerformGC, if any.
2791 // rbx: pointer to C function (C callee-saved).
2792 // rbp: frame pointer (restored after C call).
2793 // rsp: stack pointer (restored after C call).
2794 // r14: number of arguments including receiver (C callee-saved).
2795 // r15: pointer to the first argument (C callee-saved).
2796 // This pointer is reused in LeaveExitFrame(), so it is stored in a
2797 // callee-saved register.
2799 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2800 // Complex results must be written to address passed as first argument.
2801 // AMD64 calling convention: a struct of two pointers in rax+rdx
2803 // Check stack alignment.
2804 if (FLAG_debug_code) {
2805 __ CheckStackAlignment();
2806 }
2808 if (do_gc) {
2809 // Pass failure code returned from last attempt as first argument to
2810 // PerformGC. No need to use PrepareCallCFunction/CallCFunction here as the
2811 // stack is known to be aligned. This function takes one argument which is
2812 // passed in register.
2813 __ movq(arg_reg_2, ExternalReference::isolate_address(masm->isolate()));
2814 __ movq(arg_reg_1, rax);
2815 __ movq(kScratchRegister,
2816 ExternalReference::perform_gc_function(masm->isolate()));
2817 __ call(kScratchRegister);
2818 }
2820 ExternalReference scope_depth =
2821 ExternalReference::heap_always_allocate_scope_depth(masm->isolate());
2822 if (always_allocate_scope) {
2823 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
2824 __ incl(scope_depth_operand);
2825 }
2827 // Call C function.
2828 #ifdef _WIN64
2829 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2830 // Pass argv and argc as two parameters. The arguments object will
2831 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2832 if (result_size_ < 2) {
2833 // Pass a pointer to the Arguments object as the first argument.
2834 // Return result in single register (rax).
2835 __ movq(rcx, r14); // argc.
2836 __ movq(rdx, r15); // argv.
2837 __ movq(r8, ExternalReference::isolate_address(masm->isolate()));
2838 } else {
2839 ASSERT_EQ(2, result_size_);
2840 // Pass a pointer to the result location as the first argument.
2841 __ lea(rcx, StackSpaceOperand(2));
2842 // Pass a pointer to the Arguments object as the second argument.
2843 __ movq(rdx, r14); // argc.
2844 __ movq(r8, r15); // argv.
2845 __ movq(r9, ExternalReference::isolate_address(masm->isolate()));
2846 }
2847 __ call(rbx);
2848 #else // _WIN64
2849 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2850 __ movq(rdi, r14); // argc.
2851 __ movq(rsi, r15); // argv.
2852 __ movq(rdx, ExternalReference::isolate_address(masm->isolate()));
2853 __ call(rbx);
2854 #endif // _WIN64
2855 // Result is in rax - do not destroy this register!
2857 if (always_allocate_scope) {
2858 Operand scope_depth_operand = masm->ExternalOperand(scope_depth);
2859 __ decl(scope_depth_operand);
2860 }
2862 // Check for failure result.
2863 Label failure_returned;
2864 STATIC_ASSERT(((kFailureTag + 1) & kFailureTagMask) == 0);
2865 #ifdef _WIN64
2866 // If return value is on the stack, pop it to registers.
2867 if (result_size_ > 1) {
2868 ASSERT_EQ(2, result_size_);
2869 // Read result values stored on stack. Result is stored
2870 // above the four argument mirror slots and the two
2871 // Arguments object slots.
2872 __ movq(rax, Operand(rsp, 6 * kPointerSize));
2873 __ movq(rdx, Operand(rsp, 7 * kPointerSize));
2874 }
2875 #endif
2876 __ lea(rcx, Operand(rax, 1));
2877 // Lower 2 bits of rcx are 0 iff rax has failure tag.
2878 __ testl(rcx, Immediate(kFailureTagMask));
2879 __ j(zero, &failure_returned);
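// Why the lea/test pair above detects failures: failure objects have both
// low tag bits set (kFailureTag == 3), so rax + 1 clears exactly those bits:
//   failure:     ...11 + 1 = ...00 -> test against kFailureTagMask is zero
//   heap object: ...01 + 1 = ...10 -> non-zero, take the normal result path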
2881 // Exit the JavaScript to C++ exit frame.
2882 __ LeaveExitFrame(save_doubles_);
2883 __ ret(0);
2885 // Handling of failure.
2886 __ bind(&failure_returned);
2888 Label retry;
2889 // If the returned exception is RETRY_AFTER_GC, continue at the retry label.
2890 STATIC_ASSERT(Failure::RETRY_AFTER_GC == 0);
2891 __ testl(rax, Immediate(((1 << kFailureTypeTagSize) - 1) << kFailureTagSize));
2892 __ j(zero, &retry, Label::kNear);
2894 // Special handling of out of memory exceptions.
2895 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
2897 // Retrieve the pending exception.
2898 ExternalReference pending_exception_address(
2899 Isolate::kPendingExceptionAddress, masm->isolate());
2900 Operand pending_exception_operand =
2901 masm->ExternalOperand(pending_exception_address);
2902 __ movq(rax, pending_exception_operand);
2904 // See if we just retrieved an OOM exception.
2905 JumpIfOOM(masm, rax, kScratchRegister, throw_out_of_memory_exception);
2907 // Clear the pending exception.
2908 pending_exception_operand =
2909 masm->ExternalOperand(pending_exception_address);
2910 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2911 __ movq(pending_exception_operand, rdx);
2913 // Special handling of termination exceptions which are uncatchable
2914 // by javascript code.
2915 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2916 __ j(equal, throw_termination_exception);
2918 // Handle normal exception.
2919 __ jmp(throw_normal_exception);
2921 __ bind(&retry);
2922 }
2926 void CEntryStub::Generate(MacroAssembler* masm) {
2927 // rax: number of arguments including receiver
2928 // rbx: pointer to C function (C callee-saved)
2929 // rbp: frame pointer of calling JS frame (restored after C call)
2930 // rsp: stack pointer (restored after C call)
2931 // rsi: current context (restored)
2933 // NOTE: Invocations of builtins may return failure objects
2934 // instead of a proper result. The builtin entry handles
2935 // this by performing a garbage collection and retrying the
2936 // builtin once.
2938 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2940 // Enter the exit frame that transitions from JavaScript to C++.
2941 #ifdef _WIN64
2942 int arg_stack_space = (result_size_ < 2 ? 2 : 4);
2943 #else
2944 int arg_stack_space = 0;
2945 #endif
2946 __ EnterExitFrame(arg_stack_space, save_doubles_);
2948 // rax: Holds the context at this point, but should not be used.
2949 // On entry to code generated by GenerateCore, it must hold
2950 // a failure result if the collect_garbage argument to GenerateCore
2951 // is true. This failure result can be the result of code
2952 // generated by a previous call to GenerateCore. The value
2953 // of rax is then passed to Runtime::PerformGC.
2954 // rbx: pointer to builtin function (C callee-saved).
2955 // rbp: frame pointer of exit frame (restored after C call).
2956 // rsp: stack pointer (restored after C call).
2957 // r14: number of arguments including receiver (C callee-saved).
2958 // r15: argv pointer (C callee-saved).
2960 Label throw_normal_exception;
2961 Label throw_termination_exception;
2962 Label throw_out_of_memory_exception;
2964 // Call into the runtime system.
2965 GenerateCore(masm,
2966 &throw_normal_exception,
2967 &throw_termination_exception,
2968 &throw_out_of_memory_exception,
2969 false,
2970 false);
2972 // Do space-specific GC and retry runtime call.
2973 GenerateCore(masm,
2974 &throw_normal_exception,
2975 &throw_termination_exception,
2976 &throw_out_of_memory_exception,
2977 true,
2978 false);
2980 // Do full GC and retry runtime call one final time.
2981 Failure* failure = Failure::InternalError();
2982 __ movq(rax, failure, RelocInfo::NONE64);
2983 GenerateCore(masm,
2984 &throw_normal_exception,
2985 &throw_termination_exception,
2986 &throw_out_of_memory_exception,
2987 true,
2988 true);
2990 __ bind(&throw_out_of_memory_exception);
2991 // Set external caught exception to false.
2992 Isolate* isolate = masm->isolate();
2993 ExternalReference external_caught(Isolate::kExternalCaughtExceptionAddress,
2994 isolate);
2995 __ Set(rax, static_cast<int64_t>(false));
2996 __ Store(external_caught, rax);
2998 // Set pending exception and rax to out of memory exception.
2999 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
3000 isolate);
3001 Label already_have_failure;
3002 JumpIfOOM(masm, rax, kScratchRegister, &already_have_failure);
3003 __ movq(rax, Failure::OutOfMemoryException(0x1), RelocInfo::NONE64);
3004 __ bind(&already_have_failure);
3005 __ Store(pending_exception, rax);
3006 // Fall through to the next label.
3008 __ bind(&throw_termination_exception);
3009 __ ThrowUncatchable(rax);
3011 __ bind(&throw_normal_exception);
3012 __ Throw(rax);
3013 }
3016 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
3017 Label invoke, handler_entry, exit;
3018 Label not_outermost_js, not_outermost_js_2;
3020 ProfileEntryHookStub::MaybeCallEntryHook(masm);
3022 { // NOLINT. Scope block confuses linter.
3023 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
3024 // Set up frame.
3025 __ push(rbp);
3026 __ movq(rbp, rsp);
3028 // Push the stack frame type marker twice.
3029 int marker = is_construct ? StackFrame::ENTRY_CONSTRUCT : StackFrame::ENTRY;
3030 // Scratch register is neither callee-save, nor an argument register on any
3031 // platform. It's free to use at this point.
3032 // Cannot use smi-register for loading yet.
3033 __ movq(kScratchRegister,
3034 reinterpret_cast<uint64_t>(Smi::FromInt(marker)),
3035 RelocInfo::NONE64);
3036 __ push(kScratchRegister); // context slot
3037 __ push(kScratchRegister); // function slot
3038 // Save callee-saved registers (X64/Win64 calling conventions).
3039 __ push(r12);
3040 __ push(r13);
3041 __ push(r14);
3042 __ push(r15);
3043 #ifdef _WIN64
3044 __ push(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
3045 __ push(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
3046 #endif
3047 __ push(rbx);
3049 #ifdef _WIN64
3050 // On Win64 XMM6-XMM15 are callee-save
3051 __ subq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
3052 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
3053 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
3054 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
3055 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
3056 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
3057 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
3058 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
3059 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
3060 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
3061 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
3062 #endif
3064 // Set up the roots and smi constant registers.
3065 // Needs to be done before any further smi loads.
3066 __ InitializeSmiConstantRegister();
3067 __ InitializeRootRegister();
3068 }
3070 Isolate* isolate = masm->isolate();
3072 // Save copies of the top frame descriptor on the stack.
3073 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate);
3074 {
3075 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
3076 __ push(c_entry_fp_operand);
3077 }
3079 // If this is the outermost JS call, set js_entry_sp value.
3080 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate);
3081 __ Load(rax, js_entry_sp);
3082 __ testq(rax, rax);
3083 __ j(not_zero, &not_outermost_js);
3084 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
3085 __ movq(rax, rbp);
3086 __ Store(js_entry_sp, rax);
3087 Label cont;
3088 __ jmp(&cont);
3089 __ bind(&not_outermost_js);
3090 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
3091 __ bind(&cont);
3093 // Jump to a faked try block that does the invoke, with a faked catch
3094 // block that sets the pending exception.
3095 __ jmp(&invoke);
3096 __ bind(&handler_entry);
3097 handler_offset_ = handler_entry.pos();
3098 // Caught exception: Store result (exception) in the pending exception
3099 // field in the JSEnv and return a failure sentinel.
3100 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
3101 isolate);
3102 __ Store(pending_exception, rax);
3103 __ movq(rax, Failure::Exception(), RelocInfo::NONE64);
3104 __ jmp(&exit);
3106 // Invoke: Link this frame into the handler chain. There's only one
3107 // handler block in this code object, so its index is 0.
3108 __ bind(&invoke);
3109 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
3111 // Clear any pending exceptions.
3112 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
3113 __ Store(pending_exception, rax);
3115 // Fake a receiver (NULL).
3116 __ push(Immediate(0)); // receiver
3118 // Invoke the function by calling through JS entry trampoline builtin and
3119 // pop the faked function when we return. We load the address from an
3120 // external reference instead of inlining the call target address directly
3121 // in the code, because the builtin stubs may not have been generated yet
3122 // at the time this code is generated.
3123 if (is_construct) {
3124 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
3125 isolate);
3126 __ Load(rax, construct_entry);
3127 } else {
3128 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate);
3129 __ Load(rax, entry);
3130 }
3131 __ lea(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
3132 __ call(kScratchRegister);
3134 // Unlink this frame from the handler chain.
3135 __ PopTryHandler();
3137 __ bind(&exit);
3138 // Check if the current stack frame is marked as the outermost JS frame.
3139 __ pop(rbx);
3140 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
3141 __ j(not_equal, &not_outermost_js_2);
3142 __ movq(kScratchRegister, js_entry_sp);
3143 __ movq(Operand(kScratchRegister, 0), Immediate(0));
3144 __ bind(&not_outermost_js_2);
3146 // Restore the top frame descriptor from the stack.
3147 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
3148 __ pop(c_entry_fp_operand);
3149 }
3151 // Restore callee-saved registers (X64 conventions).
3152 #ifdef _WIN64
3153 // On Win64 XMM6-XMM15 are callee-save
3154 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
3155 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
3156 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
3157 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
3158 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
3159 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
3160 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
3161 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
3162 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
3163 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
3164 __ addq(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
3165 #endif
3167 __ pop(rbx);
3168 #ifdef _WIN64
3169 // Callee saved in Win64 ABI, arguments/volatile in AMD64 ABI.
3170 __ pop(rsi);
3171 __ pop(rdi);
3172 #endif
3173 __ pop(r15);
3174 __ pop(r14);
3175 __ pop(r13);
3176 __ pop(r12);
3177 __ addq(rsp, Immediate(2 * kPointerSize)); // remove markers
3179 // Restore frame pointer and return.
3180 __ pop(rbp);
3181 __ ret(0);
3182 }
3185 void InstanceofStub::Generate(MacroAssembler* masm) {
3186 // Implements "value instanceof function" operator.
3187 // Expected input state with no inline cache:
3188 // rsp[0] : return address
3189 // rsp[8] : function pointer
3190 // rsp[16] : value
3191 // Expected input state with an inline one-element cache:
3192 // rsp[0] : return address
3193 // rsp[8] : offset from return address to location of inline cache
3194 // rsp[16] : function pointer
3195 // rsp[24] : value
3196 // Returns a bitwise zero to indicate that the value
3197 // is an instance of the function and anything else to
3198 // indicate that the value is not an instance.
3200 static const int kOffsetToMapCheckValue = 2;
3201 static const int kOffsetToResultValue = 18;
3202 // The last 4 bytes of the instruction sequence
3203 // movq(rdi, FieldOperand(rax, HeapObject::kMapOffset))
3204 // Move(kScratchRegister, Factory::the_hole_value())
3205 // in front of the hole value address.
3206 static const unsigned int kWordBeforeMapCheckValue = 0xBA49FF78;
3207 // The last 4 bytes of the instruction sequence
3208 // __ j(not_equal, &cache_miss);
3209 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
3210 // before the offset of the hole value in the root array.
3211 static const unsigned int kWordBeforeResultValue = 0x458B4909;
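// Sketch of the call-site patching scheme (offsets are x64-specific): when
// the inline check is used, rsp[8] holds the delta from the return address
// back to the inlined code; kOffsetToMapCheckValue and kOffsetToResultValue
// locate the cached map slot and the root-array displacement byte inside
// that sequence, and the kWordBefore* constants let debug code verify the
// expected instruction bytes before anything is patched.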
3212 // Only the inline check flag is supported on X64.
3213 ASSERT(flags_ == kNoFlags || HasCallSiteInlineCheck());
3214 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
3216 // Get the object - go slow case if it's a smi.
3217 Label slow;
3218 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
3219 ARGUMENTS_DONT_CONTAIN_RECEIVER);
3220 __ movq(rax, args.GetArgumentOperand(0));
3221 __ JumpIfSmi(rax, &slow);
3223 // Check that the left hand is a JS object. Leave its map in rax.
3224 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rax);
3225 __ j(below, &slow);
3226 __ CmpInstanceType(rax, LAST_SPEC_OBJECT_TYPE);
3227 __ j(above, &slow);
3229 // Get the prototype of the function.
3230 __ movq(rdx, args.GetArgumentOperand(1));
3231 // rdx is function, rax is map.
3233 // If there is a call site cache don't look in the global cache, but do the
3234 // real lookup and update the call site cache.
3235 if (!HasCallSiteInlineCheck()) {
3236 // Look up the function and the map in the instanceof cache.
3237 Label miss;
3238 __ CompareRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3239 __ j(not_equal, &miss, Label::kNear);
3240 __ CompareRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3241 __ j(not_equal, &miss, Label::kNear);
3242 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3243 __ ret(2 * kPointerSize);
3244 __ bind(&miss);
3245 }
3247 __ TryGetFunctionPrototype(rdx, rbx, &slow, true);
3249 // Check that the function prototype is a JS object.
3250 __ JumpIfSmi(rbx, &slow);
3251 __ CmpObjectType(rbx, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
3253 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
3256 // Register mapping:
3257 // rax is object map.
3258 // rdx is function.
3259 // rbx is function prototype.
3260 if (!HasCallSiteInlineCheck()) {
3261 __ StoreRoot(rdx, Heap::kInstanceofCacheFunctionRootIndex);
3262 __ StoreRoot(rax, Heap::kInstanceofCacheMapRootIndex);
3263 } else {
3264 // Get return address and delta to inlined map check.
3265 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3266 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3267 if (FLAG_debug_code) {
3268 __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
3269 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
3270 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
3271 }
3272 __ movq(kScratchRegister,
3273 Operand(kScratchRegister, kOffsetToMapCheckValue));
3274 __ movq(Operand(kScratchRegister, 0), rax);
3275 }
3277 __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
3279 // Loop through the prototype chain looking for the function prototype.
3280 Label loop, is_instance, is_not_instance;
3281 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
3284 __ j(equal, &is_instance, Label::kNear);
3285 __ cmpq(rcx, kScratchRegister);
3286 // The code at is_not_instance assumes that kScratchRegister contains a
3287 // non-zero GCable value (the null object in this case).
3288 __ j(equal, &is_not_instance, Label::kNear);
3289 __ movq(rcx, FieldOperand(rcx, HeapObject::kMapOffset));
3290 __ movq(rcx, FieldOperand(rcx, Map::kPrototypeOffset));
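// In pseudo-code, this chain walk is roughly:
//   while (true) {
//     if (current == prototype) return 0;        // is_instance
//     if (current == null) return non-zero;      // is_not_instance
//     current = current.map.prototype;
//   }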
3293 __ bind(&is_instance);
3294 if (!HasCallSiteInlineCheck()) {
3296 // Store bitwise zero in the cache. This is a Smi in GC terms.
3297 STATIC_ASSERT(kSmiTag == 0);
3298 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
3300 // Store offset of true in the root array at the inline check site.
3301 int true_offset = 0x100 +
3302 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3303 // Assert it is a 1-byte signed value.
3304 ASSERT(true_offset >= 0 && true_offset < 0x100);
3305 __ movl(rax, Immediate(true_offset));
3306 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3307 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3308 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3309 if (FLAG_debug_code) {
3310 __ movl(rax, Immediate(kWordBeforeResultValue));
3311 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3312 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3316 __ ret((2 + extra_argument_offset) * kPointerSize);
3318 __ bind(&is_not_instance);
3319 if (!HasCallSiteInlineCheck()) {
3320 // We have to store a non-zero value in the cache.
3321 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
3323 // Store offset of false in the root array at the inline check site.
3324 int false_offset = 0x100 +
3325 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
3326 // Assert it is a 1-byte signed value.
3327 ASSERT(false_offset >= 0 && false_offset < 0x100);
3328 __ movl(rax, Immediate(false_offset));
3329 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
3330 __ subq(kScratchRegister, args.GetArgumentOperand(2));
3331 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
3332 if (FLAG_debug_code) {
3333 __ movl(rax, Immediate(kWordBeforeResultValue));
3334 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
3335 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
3338 __ ret((2 + extra_argument_offset) * kPointerSize);
3340 // Slow-case: Go through the JavaScript implementation.
3342 if (HasCallSiteInlineCheck()) {
3343 // Remove extra value from the stack.
3344 __ PopReturnAddressTo(rcx);
3346 __ PushReturnAddressFrom(rcx);
3348 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
3352 // Passing arguments in registers is not supported.
3353 Register InstanceofStub::left() { return no_reg; }
3356 Register InstanceofStub::right() { return no_reg; }
3359 // -------------------------------------------------------------------------
3360 // StringCharCodeAtGenerator
3362 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
3365 Label got_char_code;
3366 Label sliced_string;
3368 // If the receiver is a smi trigger the non-string case.
3369 __ JumpIfSmi(object_, receiver_not_string_);
3371 // Fetch the instance type of the receiver into result register.
3372 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3373 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3374 // If the receiver is not a string trigger the non-string case.
3375 __ testb(result_, Immediate(kIsNotStringMask));
3376 __ j(not_zero, receiver_not_string_);
3378 // If the index is non-smi trigger the non-smi case.
3379 __ JumpIfNotSmi(index_, &index_not_smi_);
3380 __ bind(&got_smi_index_);
3382 // Check for index out of range.
3383 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
3384 __ j(above_equal, index_out_of_range_);
3386 __ SmiToInteger32(index_, index_);
3388 StringCharLoadGenerator::Generate(
3389 masm, object_, index_, result_, &call_runtime_);
3391 __ Integer32ToSmi(result_, result_);
3396 void StringCharCodeAtGenerator::GenerateSlow(
3397 MacroAssembler* masm,
3398 const RuntimeCallHelper& call_helper) {
3399 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
3401 Factory* factory = masm->isolate()->factory();
3402 // Index is not a smi.
3403 __ bind(&index_not_smi_);
3404 // If index is a heap number, try converting it to an integer.
3406 factory->heap_number_map(),
3409 call_helper.BeforeCall(masm);
3411 __ push(index_); // Consumed by runtime conversion function.
3412 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
3413 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
3415 ASSERT(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
3416 // NumberToSmi discards numbers that are not exact integers.
3417 __ CallRuntime(Runtime::kNumberToSmi, 1);
3419 if (!index_.is(rax)) {
3420 // Save the conversion result before the pop instructions below
3421 // have a chance to overwrite it.
3422 __ movq(index_, rax);
3425 // Reload the instance type.
3426 __ movq(result_, FieldOperand(object_, HeapObject::kMapOffset));
3427 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
3428 call_helper.AfterCall(masm);
3429 // If index is still not a smi, it must be out of range.
3430 __ JumpIfNotSmi(index_, index_out_of_range_);
3431 // Otherwise, return to the fast path.
3432 __ jmp(&got_smi_index_);
3434 // Call runtime. We get here when the receiver is a string and the
3435 // index is a number, but the code for getting the actual character
3436 // is too complex (e.g., when the string needs to be flattened).
3437 __ bind(&call_runtime_);
3438 call_helper.BeforeCall(masm);
3440 __ Integer32ToSmi(index_, index_);
3442 __ CallRuntime(Runtime::kStringCharCodeAt, 2);
3443 if (!result_.is(rax)) {
3444 __ movq(result_, rax);
3446 call_helper.AfterCall(masm);
3449 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
3453 // -------------------------------------------------------------------------
3454 // StringCharFromCodeGenerator
3456 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
3457 // Fast case of Heap::LookupSingleCharacterStringFromCode.
3458 __ JumpIfNotSmi(code_, &slow_case_);
3459 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
3460 __ j(above, &slow_case_);
3462 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
3463 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
3464 __ movq(result_, FieldOperand(result_, index.reg, index.scale,
3465 FixedArray::kHeaderSize));
3466 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
3467 __ j(equal, &slow_case_);
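// Cache hit: result_ now holds the preallocated one-character string for
// this code. An undefined entry (checked above) means the string has not
// been materialized yet, so we defer to the slow case.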
3472 void StringCharFromCodeGenerator::GenerateSlow(
3473 MacroAssembler* masm,
3474 const RuntimeCallHelper& call_helper) {
3475 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
3477 __ bind(&slow_case_);
3478 call_helper.BeforeCall(masm);
3480 __ CallRuntime(Runtime::kCharFromCode, 1);
3481 if (!result_.is(rax)) {
3482 __ movq(result_, rax);
3484 call_helper.AfterCall(masm);
3487 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
3491 void StringAddStub::Generate(MacroAssembler* masm) {
3492 Label call_runtime, call_builtin;
3493 Builtins::JavaScript builtin_id = Builtins::ADD;
3495 // Load the two arguments.
3496 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
3497 __ movq(rax, args.GetArgumentOperand(0)); // First argument (left).
3498 __ movq(rdx, args.GetArgumentOperand(1)); // Second argument (right).
3500 // Make sure that both arguments are strings if not known in advance.
3501 // Otherwise, at least one of the arguments is definitely a string,
3502 // and we convert the one that is not known to be a string.
3503 if ((flags_ & STRING_ADD_CHECK_BOTH) == STRING_ADD_CHECK_BOTH) {
3504 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT);
3505 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT);
3506 __ JumpIfSmi(rax, &call_runtime);
3507 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, r8);
3508 __ j(above_equal, &call_runtime);
3510 // First argument is a string, test second.
3511 __ JumpIfSmi(rdx, &call_runtime);
3512 __ CmpObjectType(rdx, FIRST_NONSTRING_TYPE, r9);
3513 __ j(above_equal, &call_runtime);
3514 } else if ((flags_ & STRING_ADD_CHECK_LEFT) == STRING_ADD_CHECK_LEFT) {
3515 ASSERT((flags_ & STRING_ADD_CHECK_RIGHT) == 0);
3516 GenerateConvertArgument(masm, 2 * kPointerSize, rax, rbx, rcx, rdi,
3518 builtin_id = Builtins::STRING_ADD_RIGHT;
3519 } else if ((flags_ & STRING_ADD_CHECK_RIGHT) == STRING_ADD_CHECK_RIGHT) {
3520 ASSERT((flags_ & STRING_ADD_CHECK_LEFT) == 0);
3521 GenerateConvertArgument(masm, 1 * kPointerSize, rdx, rbx, rcx, rdi,
3523 builtin_id = Builtins::STRING_ADD_LEFT;
3526 // Both arguments are strings.
3527 // rax: first string
3528 // rdx: second string
3529 // Check if either of the strings is empty. In that case return the other.
3530 Label second_not_zero_length, both_not_zero_length;
3531 __ movq(rcx, FieldOperand(rdx, String::kLengthOffset));
3533 __ j(not_zero, &second_not_zero_length, Label::kNear);
3534 // Second string is empty, result is first string which is already in rax.
3535 Counters* counters = masm->isolate()->counters();
3536 __ IncrementCounter(counters->string_add_native(), 1);
3537 __ ret(2 * kPointerSize);
3538 __ bind(&second_not_zero_length);
3539 __ movq(rbx, FieldOperand(rax, String::kLengthOffset));
3541 __ j(not_zero, &both_not_zero_length, Label::kNear);
3542 // First string is empty, result is second string which is in rdx.
3544 __ IncrementCounter(counters->string_add_native(), 1);
3545 __ ret(2 * kPointerSize);
3547 // Both strings are non-empty.
3548 // rax: first string
3549 // rbx: length of first string
3550 // rcx: length of second string
3551 // rdx: second string
3552 // r8: map of first string (if flags_ == NO_STRING_ADD_FLAGS)
3553 // r9: map of second string (if flags_ == NO_STRING_ADD_FLAGS)
3554 Label string_add_flat_result, longer_than_two;
3555 __ bind(&both_not_zero_length);
3557 // If arguments were known to be strings, maps are not loaded to r8 and r9
3558 // by the code above.
3559 if ((flags_ & STRING_ADD_CHECK_BOTH) != STRING_ADD_CHECK_BOTH) {
3560 __ movq(r8, FieldOperand(rax, HeapObject::kMapOffset));
3561 __ movq(r9, FieldOperand(rdx, HeapObject::kMapOffset));
3563 // Get the instance types of the two strings as they will be needed soon.
3564 __ movzxbl(r8, FieldOperand(r8, Map::kInstanceTypeOffset));
3565 __ movzxbl(r9, FieldOperand(r9, Map::kInstanceTypeOffset));
3567 // Look at the length of the result of adding the two strings.
3568 STATIC_ASSERT(String::kMaxLength <= Smi::kMaxValue / 2);
3569 __ SmiAdd(rbx, rbx, rcx);
3570 // Use the string table when adding two one-character strings, as it
3571 // helps later optimizations to return an internalized string here.
3572 __ SmiCompare(rbx, Smi::FromInt(2));
3573 __ j(not_equal, &longer_than_two);
3575 // Check that both strings are non-external ASCII strings.
3576 __ JumpIfBothInstanceTypesAreNotSequentialAscii(r8, r9, rbx, rcx,
3579 // Get the two characters forming the sub string.
3580 __ movzxbq(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3581 __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3583 // Try to look up the two-character string in the string table. If it is not
3584 // found, just allocate a new one.
3585 Label make_two_character_string, make_flat_ascii_string;
3586 StringHelper::GenerateTwoCharacterStringTableProbe(
3587 masm, rbx, rcx, r14, r11, rdi, r15, &make_two_character_string);
3588 __ IncrementCounter(counters->string_add_native(), 1);
3589 __ ret(2 * kPointerSize);
3591 __ bind(&make_two_character_string);
3593 __ AllocateAsciiString(rax, rdi, r8, r9, r11, &call_runtime);
3594 // rbx - first byte: first character
3595 // rbx - second byte: *maybe* second character
3596 // Make sure that the second byte of rbx contains the second character.
3597 __ movzxbq(rcx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3598 __ shll(rcx, Immediate(kBitsPerByte));
3600 // Write both characters to the new string.
3601 __ movw(FieldOperand(rax, SeqOneByteString::kHeaderSize), rbx);
3602 __ IncrementCounter(counters->string_add_native(), 1);
3603 __ ret(2 * kPointerSize);
3605 __ bind(&longer_than_two);
3606 // Check if resulting string will be flat.
3607 __ SmiCompare(rbx, Smi::FromInt(ConsString::kMinLength));
3608 __ j(below, &string_add_flat_result);
3609 // Handle exceptionally long strings in the runtime system.
3610 STATIC_ASSERT((String::kMaxLength & 0x80000000) == 0);
3611 __ SmiCompare(rbx, Smi::FromInt(String::kMaxLength));
3612 __ j(above, &call_runtime);
3614 // If result is not supposed to be flat, allocate a cons string object. If
3615 // both strings are ASCII the result is an ASCII cons string.
3616 // rax: first string
3617 // rbx: length of resulting flat string
3618 // rdx: second string
3619 // r8: instance type of first string
3620 // r9: instance type of second string
3621 Label non_ascii, allocated, ascii_data;
3624 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3625 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3626 __ testl(rcx, Immediate(kStringEncodingMask));
3627 __ j(zero, &non_ascii);
3628 __ bind(&ascii_data);
3629 // Allocate an ASCII cons string.
3630 __ AllocateAsciiConsString(rcx, rdi, no_reg, &call_runtime);
3631 __ bind(&allocated);
3632 // Fill the fields of the cons string.
3633 __ movq(FieldOperand(rcx, ConsString::kLengthOffset), rbx);
3634 __ movq(FieldOperand(rcx, ConsString::kHashFieldOffset),
3635 Immediate(String::kEmptyHashField));
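// A cons string represents the concatenation lazily as (length, hash,
// first, second); no characters are copied here. Flattening to a
// sequential string happens later, on demand.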
3637 Label skip_write_barrier, after_writing;
3638 ExternalReference high_promotion_mode = ExternalReference::
3639 new_space_high_promotion_mode_active_address(masm->isolate());
3640 __ Load(rbx, high_promotion_mode);
3641 __ testb(rbx, Immediate(1));
3642 __ j(zero, &skip_write_barrier);
3644 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
3645 __ RecordWriteField(rcx,
3646 ConsString::kFirstOffset,
3650 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
3651 __ RecordWriteField(rcx,
3652 ConsString::kSecondOffset,
3656 __ jmp(&after_writing);
3658 __ bind(&skip_write_barrier);
3659 __ movq(FieldOperand(rcx, ConsString::kFirstOffset), rax);
3660 __ movq(FieldOperand(rcx, ConsString::kSecondOffset), rdx);
3662 __ bind(&after_writing);
3665 __ IncrementCounter(counters->string_add_native(), 1);
3666 __ ret(2 * kPointerSize);
3667 __ bind(&non_ascii);
3668 // At least one of the strings is two-byte. Check whether it happens
3669 // to contain only one-byte characters.
3670 // rcx: first instance type AND second instance type.
3671 // r8: first instance type.
3672 // r9: second instance type.
3673 __ testb(rcx, Immediate(kOneByteDataHintMask));
3674 __ j(not_zero, &ascii_data);
3676 STATIC_ASSERT(kOneByteStringTag != 0 && kOneByteDataHintTag != 0);
3677 __ andb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
3678 __ cmpb(r8, Immediate(kOneByteStringTag | kOneByteDataHintTag));
3679 __ j(equal, &ascii_data);
3680 // Allocate a two byte cons string.
3681 __ AllocateTwoByteConsString(rcx, rdi, no_reg, &call_runtime);
3684 // We cannot encounter sliced strings or cons strings here since:
3685 STATIC_ASSERT(SlicedString::kMinLength >= ConsString::kMinLength);
3686 // Handle creating a flat result from either external or sequential strings.
3687 // Locate the first characters' locations.
3688 // rax: first string
3689 // rbx: length of resulting flat string as smi
3690 // rdx: second string
3691 // r8: instance type of first string
3692 // r9: instance type of second string
3693 Label first_prepared, second_prepared;
3694 Label first_is_sequential, second_is_sequential;
3695 __ bind(&string_add_flat_result);
3697 __ SmiToInteger32(r14, FieldOperand(rax, SeqString::kLengthOffset));
3698 // r14: length of first string
3699 STATIC_ASSERT(kSeqStringTag == 0);
3700 __ testb(r8, Immediate(kStringRepresentationMask));
3701 __ j(zero, &first_is_sequential, Label::kNear);
3702 // Rule out short external string and load string resource.
3703 STATIC_ASSERT(kShortExternalStringTag != 0);
3704 __ testb(r8, Immediate(kShortExternalStringMask));
3705 __ j(not_zero, &call_runtime);
3706 __ movq(rcx, FieldOperand(rax, ExternalString::kResourceDataOffset));
3707 __ jmp(&first_prepared, Label::kNear);
3708 __ bind(&first_is_sequential);
3709 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3710 __ lea(rcx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3711 __ bind(&first_prepared);
3713 // Check whether both strings have the same encoding.
3715 __ testb(r8, Immediate(kStringEncodingMask));
3716 __ j(not_zero, &call_runtime);
3718 __ SmiToInteger32(r15, FieldOperand(rdx, SeqString::kLengthOffset));
3719 // r15: length of second string
3720 STATIC_ASSERT(kSeqStringTag == 0);
3721 __ testb(r9, Immediate(kStringRepresentationMask));
3722 __ j(zero, &second_is_sequential, Label::kNear);
3723 // Rule out short external string and load string resource.
3724 STATIC_ASSERT(kShortExternalStringTag != 0);
3725 __ testb(r9, Immediate(kShortExternalStringMask));
3726 __ j(not_zero, &call_runtime);
3727 __ movq(rdx, FieldOperand(rdx, ExternalString::kResourceDataOffset));
3728 __ jmp(&second_prepared, Label::kNear);
3729 __ bind(&second_is_sequential);
3730 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
3731 __ lea(rdx, FieldOperand(rdx, SeqOneByteString::kHeaderSize));
3732 __ bind(&second_prepared);
3734 Label non_ascii_string_add_flat_result;
3735 // r9: instance type of second string
3736 // First string and second string have the same encoding.
3737 STATIC_ASSERT(kTwoByteStringTag == 0);
3738 __ SmiToInteger32(rbx, rbx);
3739 __ testb(r9, Immediate(kStringEncodingMask));
3740 __ j(zero, &non_ascii_string_add_flat_result);
3742 __ bind(&make_flat_ascii_string);
3743 // Both strings are ASCII strings. As they are short they are both flat.
3744 __ AllocateAsciiString(rax, rbx, rdi, r8, r9, &call_runtime);
3745 // rax: result string
3746 // Locate first character of result.
3747 __ lea(rbx, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3748 // rcx: first char of first string
3749 // rbx: first character of result
3750 // r14: length of first string
3751 StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, true);
3752 // rbx: next character of result
3753 // rdx: first char of second string
3754 // r15: length of second string
3755 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, true);
3756 __ IncrementCounter(counters->string_add_native(), 1);
3757 __ ret(2 * kPointerSize);
3759 __ bind(&non_ascii_string_add_flat_result);
3760 // Both strings are two-byte strings. As they are short they are both flat.
3761 __ AllocateTwoByteString(rax, rbx, rdi, r8, r9, &call_runtime);
3762 // rax: result string
3763 // Locate first character of result.
3764 __ lea(rbx, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
3765 // rcx: first char of first string
3766 // rbx: first character of result
3767 // r14: length of first string
3768 StringHelper::GenerateCopyCharacters(masm, rbx, rcx, r14, false);
3769 // rbx: next character of result
3770 // rdx: first char of second string
3771 // r15: length of second string
3772 StringHelper::GenerateCopyCharacters(masm, rbx, rdx, r15, false);
3773 __ IncrementCounter(counters->string_add_native(), 1);
3774 __ ret(2 * kPointerSize);
3776 // Just jump to runtime to add the two strings.
3777 __ bind(&call_runtime);
3778 __ TailCallRuntime(Runtime::kStringAdd, 2, 1);
3780 if (call_builtin.is_linked()) {
3781 __ bind(&call_builtin);
3782 __ InvokeBuiltin(builtin_id, JUMP_FUNCTION);
3787 void StringAddStub::GenerateRegisterArgsPush(MacroAssembler* masm) {
3793 void StringAddStub::GenerateRegisterArgsPop(MacroAssembler* masm,
3795 __ PopReturnAddressTo(temp);
3798 __ PushReturnAddressFrom(temp);
3802 void StringAddStub::GenerateConvertArgument(MacroAssembler* masm,
3809 // First check if the argument is already a string.
3810 Label not_string, done;
3811 __ JumpIfSmi(arg, &not_string);
3812 __ CmpObjectType(arg, FIRST_NONSTRING_TYPE, scratch1);
3815 // Check the number to string cache.
3816 __ bind(&not_string);
3817 // Puts the cached result into scratch1.
3818 __ LookupNumberStringCache(arg, scratch1, scratch2, scratch3, slow);
3819 __ movq(arg, scratch1);
3820 __ movq(Operand(rsp, stack_offset), arg);
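// The converted value is written back to its stack slot, so the rest of
// StringAddStub sees two string arguments.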
3825 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
3832 // This loop just copies one character at a time, as it is only used for very
3833 // short strings.
3835 __ movb(kScratchRegister, Operand(src, 0));
3836 __ movb(Operand(dest, 0), kScratchRegister);
3840 __ movzxwl(kScratchRegister, Operand(src, 0));
3841 __ movw(Operand(dest, 0), kScratchRegister);
3842 __ addq(src, Immediate(2));
3843 __ addq(dest, Immediate(2));
3846 __ j(not_zero, &loop);
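// On exit, src and dest point one character past the last character
// copied, which lets StringAddStub copy two strings back to back into a
// single result.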
3850 void StringHelper::GenerateCopyCharactersREP(MacroAssembler* masm,
3855 // Copy characters using rep movs of pointer-sized words, then copy the
3856 // remaining bytes one at a time after the rep movs has run.
3858 // Count is a positive int32; dest and src are character pointers.
3859 ASSERT(dest.is(rdi)); // rep movs destination
3860 ASSERT(src.is(rsi)); // rep movs source
3861 ASSERT(count.is(rcx)); // rep movs count
3863 // Nothing to do for zero characters.
3865 __ testl(count, count);
3866 __ j(zero, &done, Label::kNear);
3868 // Make count the number of bytes to copy.
3870 STATIC_ASSERT(2 == sizeof(uc16));
3871 __ addl(count, count);
3874 // Don't enter the rep movs if there are less than 4 bytes to copy.
3876 __ testl(count, Immediate(~(kPointerSize - 1)));
3877 __ j(zero, &last_bytes, Label::kNear);
3879 // Copy from rsi to rdi using the rep movs instruction.
3880 __ movl(kScratchRegister, count);
3881 __ shr(count, Immediate(kPointerSizeLog2)); // Number of pointer-sized words to copy.
3884 // Find number of bytes left.
3885 __ movl(count, kScratchRegister);
3886 __ and_(count, Immediate(kPointerSize - 1));
3888 // Check if there are more bytes to copy.
3889 __ bind(&last_bytes);
3890 __ testl(count, count);
3891 __ j(zero, &done, Label::kNear);
3893 // Copy remaining characters.
3896 __ movb(kScratchRegister, Operand(src, 0));
3897 __ movb(Operand(dest, 0), kScratchRegister);
3901 __ j(not_zero, &loop);
3906 void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
3914 // Register scratch3 is the general scratch register in this function.
3915 Register scratch = scratch3;
3917 // Make sure that both characters are not digits, as such strings have a
3918 // different hash algorithm. Don't try to look for these in the string table.
3919 Label not_array_index;
3920 __ leal(scratch, Operand(c1, -'0'));
3921 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3922 __ j(above, &not_array_index, Label::kNear);
3923 __ leal(scratch, Operand(c2, -'0'));
3924 __ cmpl(scratch, Immediate(static_cast<int>('9' - '0')));
3925 __ j(below_equal, not_found);
3927 __ bind(&not_array_index);
3928 // Calculate the two-character string hash.
3929 Register hash = scratch1;
3930 GenerateHashInit(masm, hash, c1, scratch);
3931 GenerateHashAddCharacter(masm, hash, c2, scratch);
3932 GenerateHashGetHash(masm, hash, scratch);
3934 // Collect the two characters in a register.
3935 Register chars = c1;
3936 __ shl(c2, Immediate(kBitsPerByte));
3939 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3940 // hash: hash of two character string.
3942 // Load the string table.
3943 Register string_table = c2;
3944 __ LoadRoot(string_table, Heap::kStringTableRootIndex);
3946 // Calculate capacity mask from the string table capacity.
3947 Register mask = scratch2;
3948 __ SmiToInteger32(mask,
3949 FieldOperand(string_table, StringTable::kCapacityOffset));
3952 Register map = scratch4;
3955 // chars: two character string, char 1 in byte 0 and char 2 in byte 1.
3956 // hash: hash of two character string (32-bit int)
3957 // string_table: string table
3958 // mask: capacity mask (32-bit int)
3962 // Perform a number of probes in the string table.
3963 static const int kProbes = 4;
3964 Label found_in_string_table;
3965 Label next_probe[kProbes];
3966 Register candidate = scratch; // Scratch register contains candidate.
3967 for (int i = 0; i < kProbes; i++) {
3968 // Calculate entry in string table.
3969 __ movl(scratch, hash);
3971 __ addl(scratch, Immediate(StringTable::GetProbeOffset(i)));
3973 __ andl(scratch, mask);
3975 // Load the entry from the string table.
3976 STATIC_ASSERT(StringTable::kEntrySize == 1);
3978 FieldOperand(string_table,
3981 StringTable::kElementsStartOffset));
3983 // If entry is undefined no string with this hash can be found.
3985 __ CmpObjectType(candidate, ODDBALL_TYPE, map);
3986 __ j(not_equal, &is_string, Label::kNear);
3988 __ CompareRoot(candidate, Heap::kUndefinedValueRootIndex);
3989 __ j(equal, not_found);
3990 // Must be the hole (deleted entry).
3991 if (FLAG_debug_code) {
3992 __ LoadRoot(kScratchRegister, Heap::kTheHoleValueRootIndex);
3993 __ cmpq(kScratchRegister, candidate);
3994 __ Assert(equal, kOddballInStringTableIsNotUndefinedOrTheHole);
3996 __ jmp(&next_probe[i]);
3998 __ bind(&is_string);
4000 // If length is not 2 the string is not a candidate.
4001 __ SmiCompare(FieldOperand(candidate, String::kLengthOffset),
4003 __ j(not_equal, &next_probe[i]);
4005 // We use kScratchRegister as a temporary register on the assumption that
4006 // JumpIfInstanceTypeIsNotSequentialAscii does not use it implicitly.
4007 Register temp = kScratchRegister;
4009 // Check that the candidate is a non-external ASCII string.
4010 __ movzxbl(temp, FieldOperand(map, Map::kInstanceTypeOffset));
4011 __ JumpIfInstanceTypeIsNotSequentialAscii(
4012 temp, temp, &next_probe[i]);
4014 // Check if the two characters match.
4015 __ movl(temp, FieldOperand(candidate, SeqOneByteString::kHeaderSize));
4016 __ andl(temp, Immediate(0x0000ffff));
4017 __ cmpl(chars, temp);
4018 __ j(equal, &found_in_string_table);
4019 __ bind(&next_probe[i]);
4022 // No matching two-character string found by probing.
4025 // Scratch register contains result when we fall through to here.
4026 Register result = candidate;
4027 __ bind(&found_in_string_table);
4028 if (!result.is(rax)) {
4029 __ movq(rax, result);
4034 void StringHelper::GenerateHashInit(MacroAssembler* masm,
4038 // hash = (seed + character) + ((seed + character) << 10);
4039 __ LoadRoot(scratch, Heap::kHashSeedRootIndex);
4040 __ SmiToInteger32(scratch, scratch);
4041 __ addl(scratch, character);
4042 __ movl(hash, scratch);
4043 __ shll(scratch, Immediate(10));
4044 __ addl(hash, scratch);
4045 // hash ^= hash >> 6;
4046 __ movl(scratch, hash);
4047 __ shrl(scratch, Immediate(6));
4048 __ xorl(hash, scratch);
4052 void StringHelper::GenerateHashAddCharacter(MacroAssembler* masm,
4056 // hash += character;
4057 __ addl(hash, character);
4058 // hash += hash << 10;
4059 __ movl(scratch, hash);
4060 __ shll(scratch, Immediate(10));
4061 __ addl(hash, scratch);
4062 // hash ^= hash >> 6;
4063 __ movl(scratch, hash);
4064 __ shrl(scratch, Immediate(6));
4065 __ xorl(hash, scratch);
4069 void StringHelper::GenerateHashGetHash(MacroAssembler* masm,
4072 // hash += hash << 3;
4073 __ leal(hash, Operand(hash, hash, times_8, 0));
4074 // hash ^= hash >> 11;
4075 __ movl(scratch, hash);
4076 __ shrl(scratch, Immediate(11));
4077 __ xorl(hash, scratch);
4078 // hash += hash << 15;
4079 __ movl(scratch, hash);
4080 __ shll(scratch, Immediate(15));
4081 __ addl(hash, scratch);
4083 __ andl(hash, Immediate(String::kHashBitMask));
4085 // if (hash == 0) hash = 27;
4086 Label hash_not_zero;
4087 __ j(not_zero, &hash_not_zero);
4088 __ Set(hash, StringHasher::kZeroHash);
4089 __ bind(&hash_not_zero);
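// For reference, the three helpers above compute the following hash (a
// plain-C++ sketch of the generated code, with illustrative names):
//
//   uint32_t hash = seed + c1;
//   hash += hash << 10;
//   hash ^= hash >> 6;       // GenerateHashInit
//   hash += c2;
//   hash += hash << 10;
//   hash ^= hash >> 6;       // GenerateHashAddCharacter
//   hash += hash << 3;
//   hash ^= hash >> 11;
//   hash += hash << 15;      // GenerateHashGetHash
//   hash &= String::kHashBitMask;
//   if (hash == 0) hash = StringHasher::kZeroHash;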
4093 void SubStringStub::Generate(MacroAssembler* masm) {
4096 // Stack frame on entry.
4097 // rsp[0] : return address
4102 enum SubStringStubArgumentIndices {
4103 STRING_ARGUMENT_INDEX,
4104 FROM_ARGUMENT_INDEX,
4105 TO_ARGUMENT_INDEX,
4106 SUB_STRING_ARGUMENT_COUNT
4109 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
4110 ARGUMENTS_DONT_CONTAIN_RECEIVER);
4112 // Make sure first argument is a string.
4113 __ movq(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
4114 STATIC_ASSERT(kSmiTag == 0);
4115 __ testl(rax, Immediate(kSmiTagMask));
4116 __ j(zero, &runtime);
4117 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
4118 __ j(NegateCondition(is_string), &runtime);
4121 // rbx: instance type
4122 // Calculate length of sub string using the smi values.
4123 __ movq(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
4124 __ movq(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
4125 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
4127 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
4128 __ cmpq(rcx, FieldOperand(rax, String::kLengthOffset));
4129 Label not_original_string;
4130 // Shorter than original string's length: an actual substring.
4131 __ j(below, &not_original_string, Label::kNear);
4132 // Longer than original string's length or negative: unsafe arguments.
4133 __ j(above, &runtime);
4134 // Return original string.
4135 Counters* counters = masm->isolate()->counters();
4136 __ IncrementCounter(counters->sub_string_native(), 1);
4137 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
4138 __ bind(&not_original_string);
4141 __ SmiCompare(rcx, Smi::FromInt(1));
4142 __ j(equal, &single_char);
4144 __ SmiToInteger32(rcx, rcx);
4147 // rbx: instance type
4148 // rcx: sub string length
4149 // rdx: from index (smi)
4150 // Deal with different string types: update the index if necessary
4151 // and put the underlying string into rdi.
4152 Label underlying_unpacked, sliced_string, seq_or_external_string;
4153 // If the string is not indirect, it can only be sequential or external.
4154 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
4155 STATIC_ASSERT(kIsIndirectStringMask != 0);
4156 __ testb(rbx, Immediate(kIsIndirectStringMask));
4157 __ j(zero, &seq_or_external_string, Label::kNear);
4159 __ testb(rbx, Immediate(kSlicedNotConsMask));
4160 __ j(not_zero, &sliced_string, Label::kNear);
4161 // Cons string. Check whether it is flat, then fetch first part.
4162 // Flat cons strings have an empty second part.
4163 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
4164 Heap::kempty_stringRootIndex);
4165 __ j(not_equal, &runtime);
4166 __ movq(rdi, FieldOperand(rax, ConsString::kFirstOffset));
4167 // Update instance type.
4168 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
4169 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
4170 __ jmp(&underlying_unpacked, Label::kNear);
4172 __ bind(&sliced_string);
4173 // Sliced string. Fetch parent and correct start index by offset.
4174 __ addq(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
4175 __ movq(rdi, FieldOperand(rax, SlicedString::kParentOffset));
4176 // Update instance type.
4177 __ movq(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
4178 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
4179 __ jmp(&underlying_unpacked, Label::kNear);
4181 __ bind(&seq_or_external_string);
4182 // Sequential or external string. Just move string to the correct register.
4185 __ bind(&underlying_unpacked);
4187 if (FLAG_string_slices) {
4189 // rdi: underlying subject string
4190 // rbx: instance type of underlying subject string
4191 // rdx: adjusted start index (smi)
4193 // If coming from the make_two_character_string path, the string
4194 // is too short to be sliced anyway.
4195 __ cmpq(rcx, Immediate(SlicedString::kMinLength));
4196 // Short slice. Copy instead of slicing.
4197 __ j(less, &copy_routine);
4198 // Allocate new sliced string. At this point we do not reload the instance
4199 // type including the string encoding because we simply rely on the info
4200 // provided by the original string. It does not matter if the original
4201 // string's encoding is wrong because we always have to recheck encoding of
4202 // the newly created string's parent anyways due to externalized strings.
4203 Label two_byte_slice, set_slice_header;
4204 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
4205 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
4206 __ testb(rbx, Immediate(kStringEncodingMask));
4207 // Make long jumps when allocation tracking is on, due to
4208 // RecordObjectAllocation inside MacroAssembler::Allocate.
4209 Label::Distance jump_distance =
4210 masm->isolate()->heap_profiler()->is_tracking_allocations()
4211 ? Label::kFar : Label::kNear;
4213 __ j(zero, &two_byte_slice, jump_distance);
4214 __ AllocateAsciiSlicedString(rax, rbx, r14, &runtime);
4215 __ jmp(&set_slice_header, jump_distance);
4216 __ bind(&two_byte_slice);
4217 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
4218 __ bind(&set_slice_header);
4219 __ Integer32ToSmi(rcx, rcx);
4220 __ movq(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
4221 __ movq(FieldOperand(rax, SlicedString::kHashFieldOffset),
4222 Immediate(String::kEmptyHashField));
4223 __ movq(FieldOperand(rax, SlicedString::kParentOffset), rdi);
4224 __ movq(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
4225 __ IncrementCounter(counters->sub_string_native(), 1);
4226 __ ret(3 * kPointerSize);
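// A sliced string stores only (length, hash, parent, offset), so taking
// a substring of a long string is O(1) and copies no characters.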
4228 __ bind(&copy_routine);
4231 // rdi: underlying subject string
4232 // rbx: instance type of underlying subject string
4233 // rdx: adjusted start index (smi)
4235 // The subject string can only be external or sequential string of either
4236 // encoding at this point.
4237 Label two_byte_sequential, sequential_string;
4238 STATIC_ASSERT(kExternalStringTag != 0);
4239 STATIC_ASSERT(kSeqStringTag == 0);
4240 __ testb(rbx, Immediate(kExternalStringTag));
4241 __ j(zero, &sequential_string);
4243 // Handle external string.
4244 // Rule out short external strings.
4245 STATIC_CHECK(kShortExternalStringTag != 0);
4246 __ testb(rbx, Immediate(kShortExternalStringMask));
4247 __ j(not_zero, &runtime);
4248 __ movq(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
4249 // Move the pointer so that offset-wise, it looks like a sequential string.
4250 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
4251 __ subq(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
4253 __ bind(&sequential_string);
4254 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
4255 __ testb(rbx, Immediate(kStringEncodingMask));
4256 __ j(zero, &two_byte_sequential);
4258 // Allocate the result.
4259 __ AllocateAsciiString(rax, rcx, r11, r14, r15, &runtime);
4261 // rax: result string
4262 // rcx: result string length
4263 __ movq(r14, rsi); // rsi used by following code.
4264 { // Locate character of sub string start.
4265 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
4266 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
4267 SeqOneByteString::kHeaderSize - kHeapObjectTag));
4269 // Locate first character of result.
4270 __ lea(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
4272 // rax: result string
4273 // rcx: result length
4274 // rdi: first character of result
4275 // rsi: character of sub string start
4276 // r14: original value of rsi
4277 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, true);
4278 __ movq(rsi, r14); // Restore rsi.
4279 __ IncrementCounter(counters->sub_string_native(), 1);
4280 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
4282 __ bind(&two_byte_sequential);
4283 // Allocate the result.
4284 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
4286 // rax: result string
4287 // rcx: result string length
4288 __ movq(r14, rsi); // rsi used by following code.
4289 { // Locate character of sub string start.
4290 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
4291 __ lea(rsi, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
4292 SeqOneByteString::kHeaderSize - kHeapObjectTag));
4294 // Locate first character of result.
4295 __ lea(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
4297 // rax: result string
4298 // rcx: result length
4299 // rdi: first character of result
4300 // rsi: character of sub string start
4301 // r14: original value of rsi
4302 StringHelper::GenerateCopyCharactersREP(masm, rdi, rsi, rcx, false);
4303 __ movq(rsi, r14); // Restore rsi.
4304 __ IncrementCounter(counters->sub_string_native(), 1);
4305 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
4307 // Just jump to runtime to create the sub string.
4309 __ TailCallRuntime(Runtime::kSubString, 3, 1);
4311 __ bind(&single_char);
4313 // rbx: instance type
4314 // rcx: sub string length (smi)
4315 // rdx: from index (smi)
4316 StringCharAtGenerator generator(
4317 rax, rdx, rcx, rax, &runtime, &runtime, &runtime, STRING_INDEX_IS_NUMBER);
4318 generator.GenerateFast(masm);
4319 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
4320 generator.SkipSlow(masm, &runtime);
4324 void StringCompareStub::GenerateFlatAsciiStringEquals(MacroAssembler* masm,
4328 Register scratch2) {
4329 Register length = scratch1;
4332 Label check_zero_length;
4333 __ movq(length, FieldOperand(left, String::kLengthOffset));
4334 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
4335 __ j(equal, &check_zero_length, Label::kNear);
4336 __ Move(rax, Smi::FromInt(NOT_EQUAL));
4339 // Check if the length is zero.
4340 Label compare_chars;
4341 __ bind(&check_zero_length);
4342 STATIC_ASSERT(kSmiTag == 0);
4344 __ j(not_zero, &compare_chars, Label::kNear);
4345 __ Move(rax, Smi::FromInt(EQUAL));
4348 // Compare characters.
4349 __ bind(&compare_chars);
4350 Label strings_not_equal;
4351 GenerateAsciiCharsCompareLoop(masm, left, right, length, scratch2,
4352 &strings_not_equal, Label::kNear);
4354 // Characters are equal.
4355 __ Move(rax, Smi::FromInt(EQUAL));
4358 // Characters are not equal.
4359 __ bind(&strings_not_equal);
4360 __ Move(rax, Smi::FromInt(NOT_EQUAL));
4365 void StringCompareStub::GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
4371 Register scratch4) {
4372 // Ensure that you can always subtract a string length from a non-negative
4373 // number (e.g. another length).
4374 STATIC_ASSERT(String::kMaxLength < 0x7fffffff);
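// Overall scheme: compare the first min(left.length, right.length)
// characters; the first mismatching character decides the result, and if
// all of them match, the precomputed length difference decides it
// (shorter < longer).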
4376 // Find minimum length and length difference.
4377 __ movq(scratch1, FieldOperand(left, String::kLengthOffset));
4378 __ movq(scratch4, scratch1);
4381 FieldOperand(right, String::kLengthOffset));
4382 // Register scratch4 now holds left.length - right.length.
4383 const Register length_difference = scratch4;
4385 __ j(less, &left_shorter, Label::kNear);
4386 // The right string isn't longer than the left one.
4387 // Get the right string's length by subtracting the (non-negative) difference
4388 // from the left string's length.
4389 __ SmiSub(scratch1, scratch1, length_difference);
4390 __ bind(&left_shorter);
4391 // Register scratch1 now holds Min(left.length, right.length).
4392 const Register min_length = scratch1;
4394 Label compare_lengths;
4395 // If min-length is zero, go directly to comparing lengths.
4396 __ SmiTest(min_length);
4397 __ j(zero, &compare_lengths, Label::kNear);
4400 Label result_not_equal;
4401 GenerateAsciiCharsCompareLoop(masm, left, right, min_length, scratch2,
4402 &result_not_equal, Label::kNear);
4404 // Completed loop without finding different characters.
4405 // Compare lengths (precomputed).
4406 __ bind(&compare_lengths);
4407 __ SmiTest(length_difference);
4408 Label length_not_equal;
4409 __ j(not_zero, &length_not_equal, Label::kNear);
4412 __ Move(rax, Smi::FromInt(EQUAL));
4415 Label result_greater;
4417 __ bind(&length_not_equal);
4418 __ j(greater, &result_greater, Label::kNear);
4419 __ jmp(&result_less, Label::kNear);
4420 __ bind(&result_not_equal);
4421 // Unequal comparison of left to right, either character or length.
4422 __ j(above, &result_greater, Label::kNear);
4423 __ bind(&result_less);
4426 __ Move(rax, Smi::FromInt(LESS));
4429 // Result is GREATER.
4430 __ bind(&result_greater);
4431 __ Move(rax, Smi::FromInt(GREATER));
4436 void StringCompareStub::GenerateAsciiCharsCompareLoop(
4437 MacroAssembler* masm,
4442 Label* chars_not_equal,
4443 Label::Distance near_jump) {
4444 // Change index to run from -length to -1 by adding length to string
4445 // start. This means that the loop ends when index reaches zero, which
4446 // doesn't need an additional compare.
4447 __ SmiToInteger32(length, length);
4449 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
4451 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
4453 Register index = length; // index = -length;
4458 __ movb(scratch, Operand(left, index, times_1, 0));
4459 __ cmpb(scratch, Operand(right, index, times_1, 0));
4460 __ j(not_equal, chars_not_equal, near_jump);
4462 __ j(not_zero, &loop);
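// Equivalent C-style sketch of the loop above (illustrative names):
//   for (int i = -length; i != 0; i++) {
//     if (left_end[i] != right_end[i]) goto chars_not_equal;
//   }
// where left_end and right_end point one past the last character.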
4466 void StringCompareStub::Generate(MacroAssembler* masm) {
4469 // Stack frame on entry.
4470 // rsp[0] : return address
4471 // rsp[8] : right string
4472 // rsp[16] : left string
4474 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
4475 __ movq(rdx, args.GetArgumentOperand(0)); // left
4476 __ movq(rax, args.GetArgumentOperand(1)); // right
4478 // Check for identity.
4481 __ j(not_equal, &not_same, Label::kNear);
4482 __ Move(rax, Smi::FromInt(EQUAL));
4483 Counters* counters = masm->isolate()->counters();
4484 __ IncrementCounter(counters->string_compare_native(), 1);
4485 __ ret(2 * kPointerSize);
4489 // Check that both are sequential ASCII strings.
4490 __ JumpIfNotBothSequentialAsciiStrings(rdx, rax, rcx, rbx, &runtime);
4492 // Inline comparison of ASCII strings.
4493 __ IncrementCounter(counters->string_compare_native(), 1);
4494 // Drop the arguments from the stack.
4495 __ PopReturnAddressTo(rcx);
4496 __ addq(rsp, Immediate(2 * kPointerSize));
4497 __ PushReturnAddressFrom(rcx);
4498 GenerateCompareFlatAsciiStrings(masm, rdx, rax, rcx, rbx, rdi, r8);
4500 // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
4501 // tagged as a small integer.
4503 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4507 void ICCompareStub::GenerateSmis(MacroAssembler* masm) {
4508 ASSERT(state_ == CompareIC::SMI);
4510 __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);
4512 if (GetCondition() == equal) {
4513 // For equality we do not care about the sign of the result.
4518 __ j(no_overflow, &done, Label::kNear);
4519 // Correct sign of result in case of overflow.
4531 void ICCompareStub::GenerateNumbers(MacroAssembler* masm) {
4532 ASSERT(state_ == CompareIC::NUMBER);
4535 Label unordered, maybe_undefined1, maybe_undefined2;
4538 if (left_ == CompareIC::SMI) {
4539 __ JumpIfNotSmi(rdx, &miss);
4541 if (right_ == CompareIC::SMI) {
4542 __ JumpIfNotSmi(rax, &miss);
4545 // Load left and right operand.
4546 Label done, left, left_smi, right_smi;
4547 __ JumpIfSmi(rax, &right_smi, Label::kNear);
4548 __ CompareMap(rax, masm->isolate()->factory()->heap_number_map(), NULL);
4549 __ j(not_equal, &maybe_undefined1, Label::kNear);
4550 __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
4551 __ jmp(&left, Label::kNear);
4552 __ bind(&right_smi);
4553 __ SmiToInteger32(rcx, rax); // Can't clobber rax yet.
4554 __ Cvtlsi2sd(xmm1, rcx);
4557 __ JumpIfSmi(rdx, &left_smi, Label::kNear);
4558 __ CompareMap(rdx, masm->isolate()->factory()->heap_number_map(), NULL);
4559 __ j(not_equal, &maybe_undefined2, Label::kNear);
4560 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
4563 __ SmiToInteger32(rcx, rdx); // Can't clobber rdx yet.
4564 __ Cvtlsi2sd(xmm0, rcx);
4568 __ ucomisd(xmm0, xmm1);
4570 // Don't base result on EFLAGS when a NaN is involved.
4571 __ j(parity_even, &unordered, Label::kNear);
4573 // Return a result of -1, 0, or 1, based on EFLAGS.
4574 // Use mov, because xor would destroy the flag register.
4575 __ movl(rax, Immediate(0));
4576 __ movl(rcx, Immediate(0));
4577 __ setcc(above, rax); // Add one to zero if carry clear and not equal.
4578 __ sbbq(rax, rcx); // Subtract one if below (aka. carry set).
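// After ucomisd, "above" holds iff left > right and the carry flag is
// set iff left < right, so setcc plus sbb yields +1, 0, or -1 without
// branching.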
4581 __ bind(&unordered);
4582 __ bind(&generic_stub);
4583 ICCompareStub stub(op_, CompareIC::GENERIC, CompareIC::GENERIC,
4584 CompareIC::GENERIC);
4585 __ jmp(stub.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
4587 __ bind(&maybe_undefined1);
4588 if (Token::IsOrderedRelationalCompareOp(op_)) {
4589 __ Cmp(rax, masm->isolate()->factory()->undefined_value());
4590 __ j(not_equal, &miss);
4591 __ JumpIfSmi(rdx, &unordered);
4592 __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
4593 __ j(not_equal, &maybe_undefined2, Label::kNear);
4597 __ bind(&maybe_undefined2);
4598 if (Token::IsOrderedRelationalCompareOp(op_)) {
4599 __ Cmp(rdx, masm->isolate()->factory()->undefined_value());
4600 __ j(equal, &unordered);
4608 void ICCompareStub::GenerateInternalizedStrings(MacroAssembler* masm) {
4609 ASSERT(state_ == CompareIC::INTERNALIZED_STRING);
4610 ASSERT(GetCondition() == equal);
4612 // Registers containing left and right operands respectively.
4613 Register left = rdx;
4614 Register right = rax;
4615 Register tmp1 = rcx;
4616 Register tmp2 = rbx;
4618 // Check that both operands are heap objects.
4620 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
4621 __ j(cond, &miss, Label::kNear);
4623 // Check that both operands are internalized strings.
4624 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4625 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4626 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4627 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4628 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
4630 __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
4631 __ j(not_zero, &miss, Label::kNear);
4633 // Internalized strings are compared by identity.
4635 __ cmpq(left, right);
4636 // Make sure rax is non-zero. At this point input operands are
4637 // guaranteed to be non-zero.
4638 ASSERT(right.is(rax));
4639 __ j(not_equal, &done, Label::kNear);
4640 STATIC_ASSERT(EQUAL == 0);
4641 STATIC_ASSERT(kSmiTag == 0);
4642 __ Move(rax, Smi::FromInt(EQUAL));
4651 void ICCompareStub::GenerateUniqueNames(MacroAssembler* masm) {
4652 ASSERT(state_ == CompareIC::UNIQUE_NAME);
4653 ASSERT(GetCondition() == equal);
4655 // Registers containing left and right operands respectively.
4656 Register left = rdx;
4657 Register right = rax;
4658 Register tmp1 = rcx;
4659 Register tmp2 = rbx;
4661 // Check that both operands are heap objects.
4663 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
4664 __ j(cond, &miss, Label::kNear);
4666 // Check that both operands are unique names. This leaves the instance
4667 // types loaded in tmp1 and tmp2.
4668 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4669 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4670 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4671 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4673 __ JumpIfNotUniqueName(tmp1, &miss, Label::kNear);
4674 __ JumpIfNotUniqueName(tmp2, &miss, Label::kNear);
4676 // Unique names are compared by identity.
4678 __ cmpq(left, right);
4679 // Make sure rax is non-zero. At this point input operands are
4680 // guaranteed to be non-zero.
4681 ASSERT(right.is(rax));
4682 __ j(not_equal, &done, Label::kNear);
4683 STATIC_ASSERT(EQUAL == 0);
4684 STATIC_ASSERT(kSmiTag == 0);
4685 __ Move(rax, Smi::FromInt(EQUAL));
4694 void ICCompareStub::GenerateStrings(MacroAssembler* masm) {
4695 ASSERT(state_ == CompareIC::STRING);
4698 bool equality = Token::IsEqualityOp(op_);
4700 // Registers containing left and right operands respectively.
4701 Register left = rdx;
4702 Register right = rax;
4703 Register tmp1 = rcx;
4704 Register tmp2 = rbx;
4705 Register tmp3 = rdi;
4707 // Check that both operands are heap objects.
4708 Condition cond = masm->CheckEitherSmi(left, right, tmp1);
4711 // Check that both operands are strings. This leaves the instance
4712 // types loaded in tmp1 and tmp2.
4713 __ movq(tmp1, FieldOperand(left, HeapObject::kMapOffset));
4714 __ movq(tmp2, FieldOperand(right, HeapObject::kMapOffset));
4715 __ movzxbq(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
4716 __ movzxbq(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
4717 __ movq(tmp3, tmp1);
4718 STATIC_ASSERT(kNotStringTag != 0);
4720 __ testb(tmp3, Immediate(kIsNotStringMask));
4721 __ j(not_zero, &miss);
4723 // Fast check for identical strings.
4725 __ cmpq(left, right);
4726 __ j(not_equal, &not_same, Label::kNear);
4727 STATIC_ASSERT(EQUAL == 0);
4728 STATIC_ASSERT(kSmiTag == 0);
4729 __ Move(rax, Smi::FromInt(EQUAL));
4732 // Handle not identical strings.
4735 // Check that both strings are internalized strings. If they are, we're done
4736 // because we already know they are not identical. We also know they are both
4737 // strings.
4740 STATIC_ASSERT(kInternalizedTag == 0);
4742 __ testb(tmp1, Immediate(kIsNotInternalizedMask));
4743 __ j(not_zero, &do_compare, Label::kNear);
4744 // Make sure rax is non-zero. At this point input operands are
4745 // guaranteed to be non-zero.
4746 ASSERT(right.is(rax));
4748 __ bind(&do_compare);
4751 // Check that both strings are sequential ASCII.
4753 __ JumpIfNotBothSequentialAsciiStrings(left, right, tmp1, tmp2, &runtime);
4755 // Compare flat ASCII strings. Returns when done.
4757 StringCompareStub::GenerateFlatAsciiStringEquals(
4758 masm, left, right, tmp1, tmp2);
4760 StringCompareStub::GenerateCompareFlatAsciiStrings(
4761 masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
4764 // Handle more complex cases in runtime.
4766 __ PopReturnAddressTo(tmp1);
4769 __ PushReturnAddressFrom(tmp1);
4771 __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
4773 __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
4781 void ICCompareStub::GenerateObjects(MacroAssembler* masm) {
4782 ASSERT(state_ == CompareIC::OBJECT);
4784 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
4785 __ j(either_smi, &miss, Label::kNear);
4787 __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
4788 __ j(not_equal, &miss, Label::kNear);
4789 __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
4790 __ j(not_equal, &miss, Label::kNear);
4792 ASSERT(GetCondition() == equal);
4801 void ICCompareStub::GenerateKnownObjects(MacroAssembler* masm) {
4803 Condition either_smi = masm->CheckEitherSmi(rdx, rax);
4804 __ j(either_smi, &miss, Label::kNear);
4806 __ movq(rcx, FieldOperand(rax, HeapObject::kMapOffset));
4807 __ movq(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
4808 __ Cmp(rcx, known_map_);
4809 __ j(not_equal, &miss, Label::kNear);
4810 __ Cmp(rbx, known_map_);
4811 __ j(not_equal, &miss, Label::kNear);
4821 void ICCompareStub::GenerateMiss(MacroAssembler* masm) {
4823 // Call the runtime system in a fresh internal frame.
4824 ExternalReference miss =
4825 ExternalReference(IC_Utility(IC::kCompareIC_Miss), masm->isolate());
4827 FrameScope scope(masm, StackFrame::INTERNAL);
4832 __ Push(Smi::FromInt(op_));
4833 __ CallExternalReference(miss, 3);
4835 // Compute the entry point of the rewritten stub.
4836 __ lea(rdi, FieldOperand(rax, Code::kHeaderSize));
4841 // Do a tail call to the rewritten stub.
4846 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
4849 Register properties,
4852 ASSERT(name->IsUniqueName());
4853 // If the names of the slots in the range from 1 to kProbes - 1 for the hash
4854 // value are not equal to the name and the kProbes-th slot is not used (its
4855 // name is the undefined value), the hash table is guaranteed not to contain
4856 // the property. This holds even if some slots represent deleted properties
4857 // (their names are the hole value).
4858 for (int i = 0; i < kInlinedProbes; i++) {
4859 // r0 points to properties hash.
4860 // Compute the masked index: (hash + i + i * i) & mask.
4861 Register index = r0;
4862 // Capacity is smi 2^n.
4863 __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
4866 Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));
4868 // Scale the index by multiplying by the entry size.
4869 ASSERT(NameDictionary::kEntrySize == 3);
4870 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
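// Each dictionary entry is a (key, value, details) triple, hence the
// kEntrySize of 3 and the index * 3 scaling above.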
4872 Register entity_name = r0;
4873 // Having undefined at this place means the name is not contained.
4874 ASSERT_EQ(kSmiTagSize, 1);
4875 __ movq(entity_name, Operand(properties,
4878 kElementsStartOffset - kHeapObjectTag));
4879 __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
4882 // Stop if found the property.
4883 __ Cmp(entity_name, Handle<Name>(name));
4887 // Check for the hole and skip.
4888 __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
4889 __ j(equal, &good, Label::kNear);
4891 // Check if the entry name is not a unique name.
4892 __ movq(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
4893 __ JumpIfNotUniqueName(FieldOperand(entity_name, Map::kInstanceTypeOffset),
4898 NameDictionaryLookupStub stub(properties, r0, r0, NEGATIVE_LOOKUP);
4899 __ Push(Handle<Object>(name));
4900 __ push(Immediate(name->Hash()));
4903 __ j(not_zero, miss);
4908 // Probe the name dictionary in the |elements| register. Jump to the
4909 // |done| label if a property with the given name is found leaving the
4910 // index into the dictionary in |r1|. Jump to the |miss| label otherwise.
4912 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
4919 ASSERT(!elements.is(r0));
4920 ASSERT(!elements.is(r1));
4921 ASSERT(!name.is(r0));
4922 ASSERT(!name.is(r1));
4924 __ AssertName(name);
4926 __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
4929 for (int i = 0; i < kInlinedProbes; i++) {
4930 // Compute the masked index: (hash + i + i * i) & mask.
4931 __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
4932 __ shrl(r1, Immediate(Name::kHashShift));
4934 __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
4938 // Scale the index by multiplying by the entry size.
4939 ASSERT(NameDictionary::kEntrySize == 3);
4940 __ lea(r1, Operand(r1, r1, times_2, 0)); // r1 = r1 * 3
4942 // Check if the key is identical to the name.
4943 __ cmpq(name, Operand(elements, r1, times_pointer_size,
4944 kElementsStartOffset - kHeapObjectTag));
4948 NameDictionaryLookupStub stub(elements, r0, r1, POSITIVE_LOOKUP);
4950 __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
4951 __ shrl(r0, Immediate(Name::kHashShift));
4961 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
4962 // This stub overrides SometimesSetsUpAFrame() to return false. That means
4963 // we cannot call anything that could cause a GC from this stub.
4964 // Stack frame on entry:
4965 // rsp[0 * kPointerSize] : return address.
4966 // rsp[1 * kPointerSize] : key's hash.
4967 // rsp[2 * kPointerSize] : key.
4969 // dictionary_: NameDictionary to probe.
4970 // result_: used as scratch.
4971 // index_: will hold an index of entry if lookup is successful.
4972 // might alias with result_.
4974 // result_ is zero if lookup failed, non zero otherwise.
4976 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
4978 Register scratch = result_;
4980 __ SmiToInteger32(scratch, FieldOperand(dictionary_, kCapacityOffset));
4984 // If the names of the slots in the range from 1 to kProbes - 1 for the hash
4985 // value are not equal to the name and the kProbes-th slot is not used (its
4986 // name is the undefined value), the hash table is guaranteed not to contain
4987 // the property. This holds even if some slots represent deleted properties
4988 // (their names are the hole value).
4989 StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
4991 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
4992 // Compute the masked index: (hash + i + i * i) & mask.
4993 __ movq(scratch, args.GetArgumentOperand(1));
4995 __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
4997 __ and_(scratch, Operand(rsp, 0));
4999 // Scale the index by multiplying by the entry size.
5000 ASSERT(NameDictionary::kEntrySize == 3);
5001 __ lea(index_, Operand(scratch, scratch, times_2, 0)); // index *= 3.
5003 // Having undefined at this place means the name is not contained.
5004 __ movq(scratch, Operand(dictionary_,
5007 kElementsStartOffset - kHeapObjectTag));
5009 __ Cmp(scratch, masm->isolate()->factory()->undefined_value());
5010 __ j(equal, &not_in_dictionary);
5012 // Stop if found the property.
5013 __ cmpq(scratch, args.GetArgumentOperand(0));
5014 __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode_ == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movq(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueName(FieldOperand(scratch, Map::kInstanceTypeOffset),
                             &maybe_in_dictionary);
    }
  }
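  // Falling out of the probe loop lands on maybe_in_dictionary below, which
  // is why the unique-name bailout can be skipped on the final probe.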

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode_ == POSITIVE_LOOKUP) {
    __ movq(scratch, Immediate(0));
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movq(scratch, Immediate(1));
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movq(scratch, Immediate(0));
  __ ret(2 * kPointerSize);
}


struct AheadOfTimeWriteBarrierStubList {
  Register object, value, address;
  RememberedSetAction action;
};


#define REG(Name) { kRegister_ ## Name ## _Code }

struct AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
  // Used in RegExpExecStub.
  { REG(rbx), REG(rax), REG(rdi), EMIT_REMEMBERED_SET },
  // Used in CompileArrayPushCall.
  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
  // Used in StoreStubCompiler::CompileStoreField and
  // KeyedStoreStubCompiler::CompileStoreField via GenerateStoreField.
  { REG(rdx), REG(rcx), REG(rbx), EMIT_REMEMBERED_SET },
  // GenerateStoreField calls the stub with two different permutations of
  // registers. This is the second.
  { REG(rbx), REG(rcx), REG(rdx), EMIT_REMEMBERED_SET },
  // StoreIC::GenerateNormal via GenerateDictionaryStore.
  { REG(rbx), REG(r8), REG(r9), EMIT_REMEMBERED_SET },
  // KeyedStoreIC::GenerateGeneric.
  { REG(rbx), REG(rdx), REG(rcx), EMIT_REMEMBERED_SET },
  // KeyedStoreStubCompiler::GenerateStoreFastElement.
  { REG(rdi), REG(rbx), REG(rcx), EMIT_REMEMBERED_SET },
  { REG(rdx), REG(rdi), REG(rbx), EMIT_REMEMBERED_SET },
  // ElementsTransitionGenerator::GenerateMapChangeElementTransition
  // and ElementsTransitionGenerator::GenerateSmiToDouble
  // and ElementsTransitionGenerator::GenerateDoubleToObject
  { REG(rdx), REG(rbx), REG(rdi), EMIT_REMEMBERED_SET },
  { REG(rdx), REG(rbx), REG(rdi), OMIT_REMEMBERED_SET },
  // ElementsTransitionGenerator::GenerateSmiToDouble
  // and ElementsTransitionGenerator::GenerateDoubleToObject
  { REG(rdx), REG(r11), REG(r15), EMIT_REMEMBERED_SET },
  // ElementsTransitionGenerator::GenerateDoubleToObject
  { REG(r11), REG(rax), REG(r15), EMIT_REMEMBERED_SET },
  // StoreArrayLiteralElementStub::Generate
  { REG(rbx), REG(rax), REG(rcx), EMIT_REMEMBERED_SET },
  // FastNewClosureStub::Generate and
  // StringAddStub::Generate
  { REG(rcx), REG(rdx), REG(rbx), EMIT_REMEMBERED_SET },
  // StringAddStub::Generate
  { REG(rcx), REG(rax), REG(rbx), EMIT_REMEMBERED_SET },
  // Null termination.
  { REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET }
};

#undef REG
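// The no_reg entry terminates the table: IsPregenerated() and
// GenerateFixedRegStubsAheadOfTime() below iterate until they hit it.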


bool RecordWriteStub::IsPregenerated(Isolate* isolate) {
  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    if (object_.is(entry->object) &&
        value_.is(entry->value) &&
        address_.is(entry->address) &&
        remembered_set_action_ == entry->action &&
        save_fp_regs_mode_ == kDontSaveFPRegs) {
      return true;
    }
  }
  return false;
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(kDontSaveFPRegs);
  stub1.GetCode(isolate)->set_is_pregenerated(true);
  StoreBufferOverflowStub stub2(kSaveFPRegs);
  stub2.GetCode(isolate)->set_is_pregenerated(true);
}


void RecordWriteStub::GenerateFixedRegStubsAheadOfTime(Isolate* isolate) {
  for (AheadOfTimeWriteBarrierStubList* entry = kAheadOfTime;
       !entry->object.is(no_reg);
       entry++) {
    RecordWriteStub stub(entry->object,
                         entry->value,
                         entry->address,
                         entry->action,
                         kDontSaveFPRegs);
    stub.GetCode(isolate)->set_is_pregenerated(true);
  }
}


bool CodeStub::CanUseFPRegisters() {
  return true;  // Always have SSE2 on x64.
}


// Takes the input in 3 registers: address_, value_, and object_. A pointer to
// the value has just been written into the object; now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch them back
  // and forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  }
  __ ret(0);

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
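// Note: the x64 "nop" constants are, in effect, short cmp opcodes whose
// immediate fields consume the unused jump offsets (see their definitions in
// code-stubs-x64.h), so patching a single opcode byte flips each of the two
// sites between nop behavior and a live jmp without resizing the code.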


void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action_ == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movq(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch1(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm, mode);
    regs_.Restore(masm);
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm, mode);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm, Mode mode) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode_);
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  ASSERT(!address.is(regs_.object()));
  ASSERT(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(masm->isolate()));
  int argument_count = 3;
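  // The shuffle above guards against clobbering: if the first C argument
  // register already holds the slot address, the address is parked in
  // kScratchRegister before arg_reg_1 is overwritten with the object.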

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  if (mode == INCREMENTAL_COMPACTION) {
    __ CallCFunction(
        ExternalReference::incremental_evacuation_record_write_function(
            masm->isolate()),
        argument_count);
  } else {
    ASSERT(mode == INCREMENTAL);
    __ CallCFunction(
        ExternalReference::incremental_marking_record_write_function(
            masm->isolate()),
        argument_count);
  }
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode_);
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movq(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ and_(regs_.scratch0(), regs_.object());
  __ movq(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subq(regs_.scratch1(), Immediate(1));
  __ movq(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);
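  // The page's write barrier counter is decremented on every recorded write;
  // once it goes negative the stub skips the color check entirely and
  // informs the incremental marker.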

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movq(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental,
                     Label::kNear);
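
    // Reaching here means the value is on an evacuation candidate page but
    // the object's page skips slot recording, so there is no slot to record;
    // just make sure the value itself is not white (below).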

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),  // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object_,
                           address_,
                           value_,
                           save_fp_regs_mode_,
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}


void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  // clobbers rbx, rdx, rdi
  // -----------------------------------

  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rdx, args.GetArgumentOperand(1));
  __ movq(rbx, args.GetArgumentOperand(0));
  __ movq(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // A store into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  __ push(rbx);
  __ push(rcx);
  __ push(rax);
  __ movq(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);
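  // The five values pushed above become the runtime call's arguments: the
  // array literal, the element index, the value, the enclosing function's
  // literals array, and the literal's index within it.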

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ lea(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                           FixedArrayBase::kHeaderSize));
  __ movq(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);
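  // The smi check in RecordWrite can be omitted because smi values were
  // dispatched to smi_element above and never reach the fast_elements path.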

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movq(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movq(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movq(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(1, fp_registers_ ? kSaveFPRegs : kDontSaveFPRegs);
  __ Call(ces.GetCode(masm->isolate()), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movq(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset = function_mode_ == JS_FUNCTION_STUB_MODE
      ? kPointerSize
      : 0;
  __ lea(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}
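// In JS_FUNCTION_STUB_MODE one extra kPointerSize is dropped in addition to
// the rbx parameter slots; this accounts for the receiver slot that
// JS-function-style callers leave on the stack.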


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    // It's always safe to call the entry hook stub, as the hook itself
    // is not allowed to call back to V8.
    AllowStubCallsScope allow_stub_calls(masm, true);

    ProfileEntryHookStub stub;
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ push(arg_reg_1);
  __ push(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ lea(arg_reg_2, Operand(rsp, (kNumSavedRegisters + 1) * kPointerSize));

  // Calculate the function address to the first arg.
  __ movq(arg_reg_1, Operand(rsp, kNumSavedRegisters * kPointerSize));
  __ subq(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
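  // arg_reg_1 now holds the address of the call instruction that invoked
  // this stub (the return address minus the short-call length), i.e. the
  // entry point being profiled; arg_reg_2 holds the caller's original rsp.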

  // Call the entry hook function.
  __ movq(rax, FUNCTION_ADDR(masm->isolate()->function_entry_hook()),
          RelocInfo::NONE64);

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);
  __ pop(arg_reg_2);
  __ pop(arg_reg_1);

  __ ret(0);
}


template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(GetInitialFastElementsKind(),
           CONTEXT_CHECK_REQUIRED,
           mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
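// rdx carries the runtime ElementsKind, so the loop above unrolls into a
// chain of cmp/branch pairs at code-generation time, one per fast kind, each
// tail-calling a stub specialized for that kind.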


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - type info cell (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    ASSERT(FAST_SMI_ELEMENTS == 0);
    ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    ASSERT(FAST_ELEMENTS == 2);
    ASSERT(FAST_HOLEY_ELEMENTS == 3);
    ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movq(rcx, args.GetArgumentOperand(0));
  __ testq(rcx, rcx);
  __ j(zero, &normal_sequence);
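  // A single non-zero length argument (e.g. new Array(5)) produces holes, so
  // only a zero length may stay on the packed "normal" sequence; the ASSERTs
  // above guarantee that kind | 1 is the holey variant of a packed kind.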

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(holey_initial,
                                                  CONTEXT_CHECK_REQUIRED,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(initial,
                                            CONTEXT_CHECK_REQUIRED,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the cell).
    __ incl(rdx);
    __ movq(rcx, FieldOperand(rbx, Cell::kValueOffset));
    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSiteInCell);
    }

    // Save the resulting elements kind in type info.
    __ Integer32ToSmi(rdx, rdx);
    __ movq(FieldOperand(rcx, AllocationSite::kTransitionInfoOffset), rdx);
    __ SmiToInteger32(rdx, rdx);
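    // The kind is boxed as a smi just for the store into the AllocationSite's
    // transition info and immediately unboxed again for the dispatch below.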

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  ElementsKind initial_kind = GetInitialFastElementsKind();
  ElementsKind initial_holey_kind = GetHoleyElementsKind(initial_kind);

  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(kind);
    stub.GetCode(isolate)->set_is_pregenerated(true);
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE ||
        (!FLAG_track_allocation_sites &&
         (kind == initial_kind || kind == initial_holey_kind))) {
      T stub1(kind, CONTEXT_CHECK_REQUIRED, DISABLE_ALLOCATION_SITES);
      stub1.GetCode(isolate)->set_is_pregenerated(true);
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(kinds[i]);
    stubh1.GetCode(isolate)->set_is_pregenerated(true);
    InternalArraySingleArgumentConstructorStub stubh2(kinds[i]);
    stubh2.GetCode(isolate)->set_is_pregenerated(true);
    InternalArrayNArgumentsConstructorStub stubh3(kinds[i]);
    stubh3.GetCode(isolate)->set_is_pregenerated(true);
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count_ == ANY) {
    Label not_zero_case, not_one_case;
    __ testq(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count_ == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count_ == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count_ == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : type info cell
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid cell.
    Label okay_here;
    Handle<Map> cell_map = masm->isolate()->factory()->cell_map();
    __ Cmp(rbx, undefined_sentinel);
    __ j(equal, &okay_here);
    __ Cmp(FieldOperand(rbx, 0), cell_map);
    __ Assert(equal, kExpectedPropertyCellInRegisterRbx);
    __ bind(&okay_here);
  }

  Label no_info;
  // If the type cell is undefined, or contains anything other than an
  // AllocationSite, call an array constructor that doesn't use AllocationSites.
  __ Cmp(rbx, undefined_sentinel);
  __ j(equal, &no_info);
  __ movq(rdx, FieldOperand(rbx, Cell::kValueOffset));
  __ Cmp(FieldOperand(rdx, 0),
         masm->isolate()->factory()->allocation_site_map());
  __ j(not_equal, &no_info);

  __ movq(rdx, FieldOperand(rdx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
}
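// Only a cell holding an AllocationSite yields ElementsKind feedback (via
// its transition info); any other cell content disables allocation-site
// tracking for this construct site.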


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testq(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movq(rcx, args.GetArgumentOperand(0));
    __ testq(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : type info cell
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movq(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbq(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ and_(rcx, Immediate(Map::kElementsKindMask));
  __ shr(rcx, Immediate(Map::kElementsKindShift));
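  // The kind is a small bit field inside "bit field 2": the mask strips the
  // unrelated flag bits and the shift aligns the field to bit zero, leaving
  // an integer comparable against FAST_ELEMENTS / FAST_HOLEY_ELEMENTS below.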

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64