// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X64

#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}


static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  }
}
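

// The InitializeDescriptor overrides below pass 0, 1, or -1 for
// constant_stack_parameter_count; -1 selects the variable-arity form in
// which the actual argument count is passed to the stub in rax.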
void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}


#define __ ACCESS_MASM(masm)
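
// In the stub generators below, "__" expands to "masm->" via ACCESS_MASM, so
// each "__ op(...)" line emits a single x64 instruction into the stub being
// generated.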


void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}
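

// StoreBufferOverflowStub is called when the GC's store buffer runs full: it
// preserves all caller-saved registers (including the XMM registers when
// save_doubles() is set) around a C call that processes the store buffer.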
void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};
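

// DoubleToIStub reads a double from memory (source register plus offset) and
// truncates it to a 32-bit integer. Exponents small enough to leave the
// result in the low mantissa bits are handled with integer shifts; anything
// larger goes through cvttsd2siq in the process_64_bits path.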
void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);

  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);

  __ bind(&done);
}
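

// MathPowStub computes base ** exponent. Depending on exponent_type(), the
// inputs arrive as tagged values on the stack (ON_STACK), as a tagged
// exponent in rdx (TAGGED), or as an untagged int32 exponent (INTEGER); the
// remaining case supplies both operands as untagged doubles in xmm registers.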
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;

      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ movq(double_scratch, scratch);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();    // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();     // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);   // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();   // 2^X, rnd(X)
    __ fstp(1);    // 2^X
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);                // Back up exponent.
  __ movsd(double_scratch, double_base);     // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);
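
  // Exponentiate by squaring: each iteration shifts the lowest exponent bit
  // into the carry flag and squares double_scratch, folding it into
  // double_result whenever the shifted-out bit was set.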
  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}
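

// FunctionPrototypeStub loads the "prototype" property of a function
// receiver, deferring to the LOAD_IC miss builtin when the fast path fails.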
void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!AreAliased(r8, r9, LoadWithVectorDescriptor::VectorRegister(),
                     LoadDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  // The key is in rdx and the parameter count is in rax.
  DCHECK(rdx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(rax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame. We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context. We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpp(rdx, rax);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rax, rax, rdx);
  __ SmiToInteger32(rax, rax);
  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, args.GetArgumentOperand(0));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpp(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rcx, rcx, rdx);
  __ SmiToInteger32(rcx, rcx);
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kArguments, 1, 1);
}
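

// GenerateNewSloppyFast builds a sloppy-mode arguments object inline: the
// JSObject, the parameter map (when some formals are mapped), and the
// FixedArray backing store are carved out of one new-space allocation, with
// a runtime fallback if allocation fails.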
void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  Factory* factory = isolate()->factory();

  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(rcx, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
  __ cmpp(rbx, rcx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, rcx);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
  // Get the arguments map from the current native context into rdi.
  Label has_mapped_parameters, instantiate;
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (untagged)
  // rdi = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ movp(rdx, args.GetArgumentOperand(0));
  __ AssertNotSmi(rdx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsCalleeIndex * kPointerSize),
          rdx);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: rcx is tagged from here on.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, edi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged argument count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameter thus needs to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, args.GetArgumentOperand(2));
  __ subp(r8, r9);
  __ Move(r11, factory->the_hole_value());
  __ movp(rdx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  // r9 = loop variable (tagged)
  // r8 = mapping index (tagged)
  // r11 = the hole value
  // rdx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
  __ SmiToInteger64(kScratchRegister, r9);
  __ movp(FieldOperand(rdx, kScratchRegister,
                       times_pointer_size,
                       kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, kScratchRegister,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r11);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ SmiTest(r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // rcx = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  __ movp(rdx, args.GetArgumentOperand(1));
  // Untag rcx for the loop below.
  __ SmiToInteger64(rcx, rcx);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, rcx);
  __ j(less, &arguments_loop, Label::kNear);

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  // rcx = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(args.GetArgumentOperand(2), rcx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}


void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}


void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : language mode
  // rsp[16] : index of rest parameter
  // rsp[24] : number of parameters
  // rsp[32] : receiver displacement

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 4, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(1), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(0), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewRestParam, 4, 1);
}


void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(key, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // receiver
  __ Push(key);       // key
  __ PushReturnAddressFrom(scratch);

  // Perform tail call to the entry.
  __ TailCallRuntime(Runtime::kLoadElementWithInterceptor, 2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}
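

// GenerateNewStrict builds a strict-mode arguments object, which never maps
// formals to the context: one allocation covers the JSObject plus a
// FixedArray into which the actual arguments are copied.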
void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testp(rcx, rcx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ leap(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addp(rcx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
  __ movp(rdi, Operand(rdi, offset));

  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ testp(rcx, rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ movp(rdx, args.GetArgumentOperand(1));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ leap(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);

  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  // Untag the length for the loop below.
  __ SmiToInteger64(rcx, rcx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movp(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
  __ addp(rdi, Immediate(kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  __ decp(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time or if regexp entry in generated code is turned off by a runtime
  // switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fit in the static offsets vector buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or     number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  //
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (6).
  // (3) Anything but sequential or cons? If yes, go to (7).
  // (4) Cons string. If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte? If yes, go to (9).
  // (5b) Is subject external? If yes, go to (8).
  // (6) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.

  // Deferred code at the end of the stub:
  // (7) Not a long external string? If yes, go to (10).
  // (8) External string. Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte? If yes, go to (6).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced string. Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
      external_string /* 8 */, check_underlying /* 5a */,
      not_seq_nor_cons /* 7 */, check_code /* E */,
      not_long_external /* 10 */;

  // (1) Sequential two byte? If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (6).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons? If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte? If yes, go to (9).
  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external? If yes, go to (8).
  __ testb(rbx, Immediate(kStringRepresentationMask));
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // (6) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);

  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);

  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte 0 if two_byte);
  // r11: code
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use rbp, which points exactly to one pointer size below the previous rsp.
  // (Because creating a new stack frame pushes the previous rbp onto the stack
  // and thereby moves up rsp by one kPointerSize.)
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, r15);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not yet been created. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow, 4, 1);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string? If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte? If yes, go to (6).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (5a).
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}
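

// Returns the result value to report for an ordering comparison whose answer
// must be "false" (e.g. when an operand is NaN or undefined): LESS when cc
// tests for greater/greater-equal, GREATER otherwise.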
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}
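

// The generic (slowest) CompareIC path: compares rdx (lhs) with rax (rhs)
// and returns a signed integer in rax whose sign encodes the result, as
// described in the comments below.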
1511 void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
1512 Label runtime_call, check_unequal_objects, done;
1513 Condition cc = GetCondition();
1514 Factory* factory = isolate()->factory();
1517 CheckInputType(masm, rdx, left(), &miss);
1518 CheckInputType(masm, rax, right(), &miss);
1520 // Compare two smis.
1521 Label non_smi, smi_done;
1522 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1524 __ j(no_overflow, &smi_done);
1525 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
1531 // The compare stub returns a positive, negative, or zero 64-bit integer
1532 // value in rax, corresponding to result of comparing the two inputs.
1533 // NOTICE! This code is only reached after a smi-fast-case check, so
1534 // it is certain that at least one operand isn't a smi.
1536 // Two identical objects are equal unless they are both NaN or undefined.
1537 {
1538 Label not_identical;
1539 __ cmpp(rdx, rax);
1540 __ j(not_equal, &not_identical, Label::kNear);
1543 // Check for undefined. undefined OP undefined is false even though
1544 // undefined == undefined.
1545 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1546 if (is_strong(strength())) {
1547 // In strong mode, this comparison must throw, so call the runtime.
1548 __ j(equal, &runtime_call, Label::kFar);
1549 } else {
1550 Label check_for_nan;
1551 __ j(not_equal, &check_for_nan, Label::kNear);
1552 __ Set(rax, NegativeComparisonResult(cc));
1553 __ ret(0);
1554 __ bind(&check_for_nan);
1555 }
1558 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
1559 // so we do the second best thing - test it ourselves.
1560 Label heap_number;
1561 // If it's not a heap number, then return equal for (in)equality operator.
1562 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1563 factory->heap_number_map());
1564 __ j(equal, &heap_number, Label::kNear);
1566 __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
1567 __ movzxbl(rcx, FieldOperand(rcx, Map::kInstanceTypeOffset));
1568 // Call runtime on identical objects. Otherwise return equal.
1569 __ cmpb(rcx, Immediate(static_cast<uint8_t>(FIRST_SPEC_OBJECT_TYPE)));
1570 __ j(above_equal, &runtime_call, Label::kFar);
1571 // Call runtime on identical symbols since we need to throw a TypeError.
1572 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SYMBOL_TYPE)));
1573 __ j(equal, &runtime_call, Label::kFar);
1574 // Call runtime on identical SIMD values since we must throw a TypeError.
1575 __ cmpb(rcx, Immediate(static_cast<uint8_t>(SIMD128_VALUE_TYPE)));
1576 __ j(equal, &runtime_call, Label::kFar);
1577 if (is_strong(strength())) {
1578 // We have already tested for smis and heap numbers, so if both
1579 // arguments are not strings we must proceed to the slow case.
1580 __ testb(rcx, Immediate(kIsNotStringMask));
1581 __ j(not_zero, &runtime_call, Label::kFar);
1582 }
1583 __ Set(rax, EQUAL);
1584 __ ret(0);
1587 __ bind(&heap_number);
1588 // It is a heap number, so return equal if it's not NaN.
1589 // For NaN, return 1 for every condition except greater and
1590 // greater-equal. Return -1 for them, so the comparison yields
1591 // false for all conditions except not-equal.
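// Example: in JS both (NaN < 1) and (NaN >= 1) are false, so a NaN result
// of 1 for less/less-equal and -1 for greater/greater-equal makes every
// condition except not-equal come out false.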
1592 __ Set(rax, 0);
1593 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1594 __ ucomisd(xmm0, xmm0);
1595 __ setcc(parity_even, rax);
1596 // rax is 0 for equal non-NaN heapnumbers, 1 for NaNs.
1597 if (cc == greater_equal || cc == greater) {
1598 __ negp(rax);
1599 }
1600 __ ret(0);
1602 __ bind(&not_identical);
1603 }
1605 if (cc == equal) { // Both strict and non-strict.
1606 Label slow; // Fallthrough label.
1608 // If we're doing a strict equality comparison, we don't have to do
1609 // type conversion, so we generate code to do fast comparison for objects
1610 // and oddballs. Non-smi numbers and strings still go through the usual
1611 // slow-case code.
1612 if (strict()) {
1613 // If either is a Smi (we know that not both are), then they can only
1614 // be equal if the other is a HeapNumber. If so, use the slow case.
1615 {
1616 Label not_smis;
1617 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1619 // Check if the non-smi operand is a heap number.
1620 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1621 factory->heap_number_map());
1622 // If heap number, handle it in the slow case.
1623 __ j(equal, &slow);
1624 // Return non-equal. ebx (the lower half of rbx) is not zero.
1625 __ movp(rax, rbx);
1626 __ ret(0);
1628 __ bind(&not_smis);
1629 }
1631 // If either operand is a JSObject or an oddball value, then they are not
1632 // equal since their pointers are different.
1633 // There is no test for undetectability in strict equality.
1635 // If the first object is a JS object, we have done pointer comparison.
1636 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
1637 Label first_non_object;
1638 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1639 __ j(below, &first_non_object, Label::kNear);
1640 // Return non-zero (eax (not rax) is not zero)
1641 Label return_not_equal;
1642 STATIC_ASSERT(kHeapObjectTag != 0);
1643 __ bind(&return_not_equal);
1644 __ ret(0);
1646 __ bind(&first_non_object);
1647 // Check for oddballs: true, false, null, undefined.
1648 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1649 __ j(equal, &return_not_equal);
1651 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
1652 __ j(above_equal, &return_not_equal);
1654 // Check for oddballs: true, false, null, undefined.
1655 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1656 __ j(equal, &return_not_equal);
1658 // Fall through to the general case.
1659 }
1660 __ bind(&slow);
1661 }
1663 // Generate the number comparison code.
1664 Label non_number_comparison;
1665 Label unordered;
1666 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1667 __ xorl(rax, rax);
1668 __ xorl(rcx, rcx);
1669 __ ucomisd(xmm0, xmm1);
1671 // Don't base result on EFLAGS when a NaN is involved.
1672 __ j(parity_even, &unordered, Label::kNear);
1673 // Return a result of -1, 0, or 1, based on EFLAGS.
1674 __ setcc(above, rax);
1675 __ setcc(below, rcx);
1676 __ subp(rax, rcx);
1677 __ ret(0);
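// Example: above gives rax = 1, rcx = 0 -> result 1 (greater); below gives
// 0 - 1 = -1 (less); equal leaves both registers zero -> result 0.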
1679 // If one of the numbers was NaN, then the result is always false.
1680 // The cc is never not-equal.
1681 __ bind(&unordered);
1682 DCHECK(cc != not_equal);
1683 if (cc == less || cc == less_equal) {
1684 __ Set(rax, 1);
1685 } else {
1686 __ Set(rax, -1);
1687 }
1688 __ ret(0);
1690 // The number comparison code did not provide a valid result.
1691 __ bind(&non_number_comparison);
1693 // Fast negative check for internalized-to-internalized equality.
1694 Label check_for_strings;
1696 BranchIfNotInternalizedString(
1697 masm, &check_for_strings, rax, kScratchRegister);
1698 BranchIfNotInternalizedString(
1699 masm, &check_for_strings, rdx, kScratchRegister);
1701 // We've already checked for object identity, so if both operands are
1702 // internalized strings they aren't equal. Register eax (not rax) already
1703 // holds a non-zero value, which indicates not equal, so just return.
1704 __ ret(0);
1707 __ bind(&check_for_strings);
1709 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1710 &check_unequal_objects);
1712 // Inline comparison of one-byte strings.
1713 if (cc == equal) {
1714 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
1715 } else {
1716 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1717 r14, r15);
1718 }
1720 #ifdef DEBUG
1721 __ Abort(kUnexpectedFallThroughFromStringComparison);
1722 #endif
1724 __ bind(&check_unequal_objects);
1725 if (cc == equal && !strict()) {
1726 // Not strict equality. Objects are unequal if
1727 // they are both JSObjects and not undetectable,
1728 // and their pointers are different.
1729 Label return_unequal;
1730 // At most one is a smi, so we can test for smi by adding the two.
1731 // A smi plus a heap object has the low bit set, a heap object plus
1732 // a heap object has the low bit clear.
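// Example: a smi (tag 0) plus a heap object (tag 1) yields an odd value,
// so the low bit is set and we fall back to the runtime; two heap objects
// (1 + 1 = 2) leave the low bit clear.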
1733 STATIC_ASSERT(kSmiTag == 0);
1734 STATIC_ASSERT(kSmiTagMask == 1);
1735 __ leap(rcx, Operand(rax, rdx, times_1, 0));
1736 __ testb(rcx, Immediate(kSmiTagMask));
1737 __ j(not_zero, &runtime_call, Label::kNear);
1738 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
1739 __ j(below, &runtime_call, Label::kNear);
1740 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
1741 __ j(below, &runtime_call, Label::kNear);
1742 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1743 Immediate(1 << Map::kIsUndetectable));
1744 __ j(zero, &return_unequal, Label::kNear);
1745 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1746 Immediate(1 << Map::kIsUndetectable));
1747 __ j(zero, &return_unequal, Label::kNear);
1748 // The objects are both undetectable, so they both compare as the value
1749 // undefined, and are equal.
1750 __ Set(rax, EQUAL);
1751 __ bind(&return_unequal);
1752 // Return non-equal by returning the non-zero object pointer in rax,
1753 // or return equal if we fell through to here.
1754 __ ret(0);
1755 }
1756 __ bind(&runtime_call);
1758 // Push arguments below the return address to prepare jump to builtin.
1759 __ PopReturnAddressTo(rcx);
1760 __ Push(rdx);
1761 __ Push(rax);
1763 // Figure out which native to call and setup the arguments.
1764 if (cc == equal && strict()) {
1765 __ PushReturnAddressFrom(rcx);
1766 __ TailCallRuntime(Runtime::kStrictEquals, 2, 1);
1767 } else {
1768 int context_index;
1769 if (cc == equal) {
1770 context_index = Context::EQUALS_BUILTIN_INDEX;
1771 } else {
1772 context_index = is_strong(strength())
1773 ? Context::COMPARE_STRONG_BUILTIN_INDEX
1774 : Context::COMPARE_BUILTIN_INDEX;
1775 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
1776 }
1778 __ PushReturnAddressFrom(rcx);
1780 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1781 // tagged as a small integer.
1782 __ InvokeBuiltin(context_index, JUMP_FUNCTION);
1783 }
1785 __ bind(&miss);
1786 GenerateMiss(masm);
1787 }
1790 static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub,
1791 bool is_super) {
1792 // rax : number of arguments to the construct function
1793 // rbx : feedback vector
1794 // rcx : original constructor (for IsSuperConstructorCall)
1795 // rdx : slot in feedback vector (Smi)
1796 // rdi : the function to call
1797 FrameScope scope(masm, StackFrame::INTERNAL);
1799 // Number-of-arguments register must be smi-tagged to call out.
1800 __ Integer32ToSmi(rax, rax);
1801 __ Push(rax);
1802 __ Push(rdi);
1803 __ Integer32ToSmi(rdx, rdx);
1804 __ Push(rdx);
1805 __ Push(rbx);
1806 if (is_super) {
1807 __ Push(rcx);
1808 }
1810 __ CallStub(stub);
1812 if (is_super) {
1813 __ Pop(rcx);
1814 }
1815 __ Pop(rbx);
1816 __ Pop(rdx);
1817 __ Pop(rdi);
1818 __ Pop(rax);
1819 __ SmiToInteger32(rax, rax);
1823 static void GenerateRecordCallTarget(MacroAssembler* masm, bool is_super) {
1824 // Cache the called function in a feedback vector slot. Cache states
1825 // are uninitialized, monomorphic (indicated by a JSFunction), and
1826 // megamorphic.
1827 // rax : number of arguments to the construct function
1828 // rbx : feedback vector
1829 // rcx : original constructor (for IsSuperConstructorCall)
1830 // rdx : slot in feedback vector (Smi)
1831 // rdi : the function to call
1832 Isolate* isolate = masm->isolate();
1833 Label initialize, done, miss, megamorphic, not_array_function,
1834 done_no_smi_convert;
1836 // Load the cache state into r11.
1837 __ SmiToInteger32(rdx, rdx);
1838 __ movp(r11,
1839 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
1841 // A monomorphic cache hit or an already megamorphic state: invoke the
1842 // function without changing the state.
1843 // We don't know if r11 is a WeakCell or a Symbol, but it's harmless to read
1844 // at this position in a symbol (see static asserts in
1845 // type-feedback-vector.h).
1846 Label check_allocation_site;
1847 __ cmpp(rdi, FieldOperand(r11, WeakCell::kValueOffset));
1848 __ j(equal, &done, Label::kFar);
1849 __ CompareRoot(r11, Heap::kmegamorphic_symbolRootIndex);
1850 __ j(equal, &done, Label::kFar);
1851 __ CompareRoot(FieldOperand(r11, HeapObject::kMapOffset),
1852 Heap::kWeakCellMapRootIndex);
1853 __ j(not_equal, FLAG_pretenuring_call_new ? &miss : &check_allocation_site);
1855 // If the weak cell is cleared, we have a new chance to become monomorphic.
1856 __ CheckSmi(FieldOperand(r11, WeakCell::kValueOffset));
1857 __ j(equal, &initialize);
1858 __ jmp(&megamorphic);
1860 if (!FLAG_pretenuring_call_new) {
1861 __ bind(&check_allocation_site);
1862 // If we came here, we need to see if we are the array function.
1863 // If we didn't have a matching function, and we didn't find the megamorph
1864 // sentinel, then we have in the slot either some other function or an
1865 // AllocationSite.
1866 __ CompareRoot(FieldOperand(r11, 0), Heap::kAllocationSiteMapRootIndex);
1867 __ j(not_equal, &miss);
1869 // Make sure the function is the Array() function
1870 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
1871 __ cmpp(rdi, r11);
1872 __ j(not_equal, &megamorphic);
1873 __ jmp(&done);
1874 }
1876 __ bind(&miss);
1878 // A monomorphic miss (i.e., here the cache is not uninitialized) goes
1879 // megamorphic.
1880 __ CompareRoot(r11, Heap::kuninitialized_symbolRootIndex);
1881 __ j(equal, &initialize);
1882 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1883 // write-barrier is needed.
1884 __ bind(&megamorphic);
1885 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1886 TypeFeedbackVector::MegamorphicSentinel(isolate));
1887 __ jmp(&done);
1889 // An uninitialized cache is patched with the function or sentinel to
1890 // indicate the ElementsKind if function is the Array constructor.
1891 __ bind(&initialize);
1893 if (!FLAG_pretenuring_call_new) {
1894 // Make sure the function is the Array() function
1895 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, r11);
1896 __ cmpp(rdi, r11);
1897 __ j(not_equal, &not_array_function);
1899 CreateAllocationSiteStub create_stub(isolate);
1900 CallStubInRecordCallTarget(masm, &create_stub, is_super);
1901 __ jmp(&done_no_smi_convert);
1903 __ bind(&not_array_function);
1904 }
1906 CreateWeakCellStub create_stub(isolate);
1907 CallStubInRecordCallTarget(masm, &create_stub, is_super);
1908 __ jmp(&done_no_smi_convert);
1910 __ bind(&done);
1911 __ Integer32ToSmi(rdx, rdx);
1913 __ bind(&done_no_smi_convert);
1914 }
1917 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
1918 // Do not transform the receiver for strict mode functions.
1919 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1920 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
1921 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1922 __ j(not_equal, cont);
1924 // Do not transform the receiver for natives.
1925 // SharedFunctionInfo is already loaded into rcx.
1926 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
1927 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1928 __ j(not_equal, cont);
1929 }
1932 static void EmitSlowCase(Isolate* isolate,
1933 MacroAssembler* masm,
1934 StackArgumentsAccessor* args,
1935 int argc,
1936 Label* non_function) {
1937 // Check for function proxy.
1938 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
1939 __ j(not_equal, non_function);
1940 __ PopReturnAddressTo(rcx);
1941 __ Push(rdi); // put proxy as additional argument under return address
1942 __ PushReturnAddressFrom(rcx);
1943 __ Set(rax, argc + 1);
1944 __ Set(rbx, 0);
1945 __ GetBuiltinEntry(rdx, Context::CALL_FUNCTION_PROXY_BUILTIN_INDEX);
1946 {
1947 Handle<Code> adaptor =
1948 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
1949 __ jmp(adaptor, RelocInfo::CODE_TARGET);
1950 }
1952 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
1953 // of the original receiver from the call site).
1954 __ bind(non_function);
1955 __ movp(args->GetReceiverOperand(), rdi);
1956 __ Set(rax, argc);
1957 __ Set(rbx, 0);
1958 __ GetBuiltinEntry(rdx, Context::CALL_NON_FUNCTION_BUILTIN_INDEX);
1959 Handle<Code> adaptor =
1960 isolate->builtins()->ArgumentsAdaptorTrampoline();
1961 __ Jump(adaptor, RelocInfo::CODE_TARGET);
1962 }
1965 static void EmitWrapCase(MacroAssembler* masm,
1966 StackArgumentsAccessor* args,
1967 Label* cont) {
1968 // Wrap the receiver and patch it back onto the stack.
1969 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
1970 __ Push(rdi);
1971 ToObjectStub stub(masm->isolate());
1972 __ CallStub(&stub);
1973 __ Pop(rdi);
1974 }
1975 __ movp(args->GetReceiverOperand(), rax);
1976 __ jmp(cont);
1977 }
1980 static void CallFunctionNoFeedback(MacroAssembler* masm,
1981 int argc, bool needs_checks,
1982 bool call_as_method) {
1983 // rdi : the function to call
1985 // wrap_and_call can only be true if we are compiling a monomorphic method.
1986 Isolate* isolate = masm->isolate();
1987 Label slow, non_function, wrap, cont;
1988 StackArgumentsAccessor args(rsp, argc);
1990 if (needs_checks) {
1991 // Check that the function really is a JavaScript function.
1992 __ JumpIfSmi(rdi, &non_function);
1994 // Goto slow case if we do not have a function.
1995 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1996 __ j(not_equal, &slow);
1997 }
1999 // Fast-case: Just invoke the function.
2000 ParameterCount actual(argc);
2002 if (call_as_method) {
2003 if (needs_checks) {
2004 EmitContinueIfStrictOrNative(masm, &cont);
2005 }
2007 // Load the receiver from the stack.
2008 __ movp(rax, args.GetReceiverOperand());
2010 if (needs_checks) {
2011 __ JumpIfSmi(rax, &wrap);
2013 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2014 __ j(below, &wrap);
2015 }
2017 __ bind(&cont);
2018 }
2022 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2025 // Slow-case: Non-function called.
2026 __ bind(&slow);
2027 EmitSlowCase(isolate, masm, &args, argc, &non_function);
2030 if (call_as_method) {
2031 __ bind(&wrap);
2032 EmitWrapCase(masm, &args, &cont);
2033 }
2034 }
2037 void CallFunctionStub::Generate(MacroAssembler* masm) {
2038 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
2039 }
2042 void CallConstructStub::Generate(MacroAssembler* masm) {
2043 // rax : number of arguments
2044 // rbx : feedback vector
2045 // rcx : original constructor (for IsSuperConstructorCall)
2046 // rdx : slot in feedback vector (Smi, for RecordCallTarget)
2047 // rdi : constructor function
2048 Label slow, non_function_call;
2050 // Check that function is not a smi.
2051 __ JumpIfSmi(rdi, &non_function_call);
2052 // Check that function is a JSFunction.
2053 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, r11);
2054 __ j(not_equal, &slow);
2056 if (RecordCallTarget()) {
2057 GenerateRecordCallTarget(masm, IsSuperConstructorCall());
2059 __ SmiToInteger32(rdx, rdx);
2060 if (FLAG_pretenuring_call_new) {
2061 // Put the AllocationSite from the feedback vector into ebx.
2062 // By adding kPointerSize we encode that we know the AllocationSite
2063 // entry is at the feedback vector slot given by rdx + 1.
2064 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2065 FixedArray::kHeaderSize + kPointerSize));
2066 } else {
2067 Label feedback_register_initialized;
2068 // Put the AllocationSite from the feedback vector into rbx, or undefined.
2069 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2070 FixedArray::kHeaderSize));
2071 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
2072 __ j(equal, &feedback_register_initialized);
2073 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2074 __ bind(&feedback_register_initialized);
2075 }
2077 __ AssertUndefinedOrAllocationSite(rbx);
2078 }
2080 // Pass original constructor to construct stub.
2081 if (IsSuperConstructorCall()) {
2082 __ movp(rdx, rcx);
2083 } else {
2084 __ movp(rdx, rdi);
2085 }
2087 // Jump to the function-specific construct stub.
2088 Register jmp_reg = rcx;
2089 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2090 __ movp(jmp_reg, FieldOperand(jmp_reg,
2091 SharedFunctionInfo::kConstructStubOffset));
2092 __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2093 __ jmp(jmp_reg);
2095 // rdi: called object
2096 // rax: number of arguments
2097 // r11: object map
2098 Label do_call;
2099 __ bind(&slow);
2100 __ CmpInstanceType(r11, JS_FUNCTION_PROXY_TYPE);
2101 __ j(not_equal, &non_function_call);
2102 __ GetBuiltinEntry(rdx,
2103 Context::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR_BUILTIN_INDEX);
2104 __ jmp(&do_call);
2106 __ bind(&non_function_call);
2107 __ GetBuiltinEntry(rdx,
2108 Context::CALL_NON_FUNCTION_AS_CONSTRUCTOR_BUILTIN_INDEX);
2109 __ bind(&do_call);
2110 // Set expected number of arguments to zero (not changing rax).
2111 __ Set(rbx, 0);
2112 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2113 RelocInfo::CODE_TARGET);
2114 }
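// EmitLoadTypeFeedbackVector walks from the current JS frame's function
// slot to the closure's SharedFunctionInfo and loads the type feedback
// vector stored there.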
2117 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2118 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2119 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
2120 __ movp(vector, FieldOperand(vector,
2121 SharedFunctionInfo::kFeedbackVectorOffset));
2125 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
2126 // rdi - function
2127 // rdx - slot id (as integer)
2128 // rbx - vector
2129 Label miss;
2130 int argc = arg_count();
2131 ParameterCount actual(argc);
2133 __ SmiToInteger32(rdx, rdx);
2135 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2136 __ cmpp(rdi, rcx);
2137 __ j(not_equal, &miss);
2139 __ movp(rax, Immediate(arg_count()));
2140 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2141 FixedArray::kHeaderSize));
2142 // Verify that rcx contains an AllocationSite
2143 Factory* factory = masm->isolate()->factory();
2144 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
2145 factory->allocation_site_map());
2146 __ j(not_equal, &miss);
2148 // Increment the call count for monomorphic function calls.
2149 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
2150 FixedArray::kHeaderSize + kPointerSize),
2151 Smi::FromInt(CallICNexus::kCallCountIncrement));
2153 __ movp(rbx, rcx);
2154 __ movp(rdx, rdi);
2155 ArrayConstructorStub stub(masm->isolate(), arg_count());
2156 __ TailCallStub(&stub);
2158 __ bind(&miss);
2159 GenerateMiss(masm);
2161 // The slow case, we need this no matter what to complete a call after a miss.
2162 CallFunctionNoFeedback(masm,
2163 arg_count(),
2164 true,
2165 CallAsMethod());
2167 // Unreachable.
2168 __ int3();
2169 }
2172 void CallICStub::Generate(MacroAssembler* masm) {
2173 // rdi - function
2174 // rdx - slot id
2175 // rbx - vector
2176 Isolate* isolate = masm->isolate();
2177 const int with_types_offset =
2178 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
2179 const int generic_offset =
2180 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
2181 Label extra_checks_or_miss, slow_start;
2182 Label slow, non_function, wrap, cont;
2183 Label have_js_function;
2184 int argc = arg_count();
2185 StackArgumentsAccessor args(rsp, argc);
2186 ParameterCount actual(argc);
2188 // The checks. First, does rdi match the recorded monomorphic target?
2189 __ SmiToInteger32(rdx, rdx);
2190 __ movp(rcx,
2191 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
2193 // We don't know that we have a weak cell. We might have a private symbol
2194 // or an AllocationSite, but the memory is safe to examine.
2195 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2196 // FixedArray.
2197 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2198 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2199 // computed, meaning that it can't appear to be a pointer. If the low bit is
2200 // 0, then hash is computed, but the 0 bit prevents the field from appearing
2201 // to be a pointer.
2202 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2203 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2204 WeakCell::kValueOffset &&
2205 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
2207 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
2208 __ j(not_equal, &extra_checks_or_miss);
2210 // The compare above could have been a SMI/SMI comparison. Guard against this
2211 // convincing us that we have a monomorphic JSFunction.
2212 __ JumpIfSmi(rdi, &extra_checks_or_miss);
2214 // Increment the call count for monomorphic function calls.
2215 __ SmiAddConstant(FieldOperand(rbx, rdx, times_pointer_size,
2216 FixedArray::kHeaderSize + kPointerSize),
2217 Smi::FromInt(CallICNexus::kCallCountIncrement));
2219 __ bind(&have_js_function);
2220 if (CallAsMethod()) {
2221 EmitContinueIfStrictOrNative(masm, &cont);
2223 // Load the receiver from the stack.
2224 __ movp(rax, args.GetReceiverOperand());
2226 __ JumpIfSmi(rax, &wrap);
2228 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2234 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2236 __ bind(&slow);
2237 EmitSlowCase(isolate, masm, &args, argc, &non_function);
2239 if (CallAsMethod()) {
2240 __ bind(&wrap);
2241 EmitWrapCase(masm, &args, &cont);
2242 }
2244 __ bind(&extra_checks_or_miss);
2245 Label uninitialized, miss;
2247 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
2248 __ j(equal, &slow_start);
2250 // The following cases attempt to handle MISS cases without going to the
2251 // runtime.
2252 if (FLAG_trace_ic) {
2253 __ jmp(&miss);
2254 }
2256 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
2257 __ j(equal, &uninitialized);
2259 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2260 // to handle it here. More complex cases are dealt with in the runtime.
2261 __ AssertNotSmi(rcx);
2262 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
2263 __ j(not_equal, &miss);
2264 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2265 TypeFeedbackVector::MegamorphicSentinel(isolate));
2266 // We have to update statistics for runtime profiling.
2267 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(-1));
2268 __ SmiAddConstant(FieldOperand(rbx, generic_offset), Smi::FromInt(1));
2269 __ jmp(&slow_start);
2271 __ bind(&uninitialized);
2273 // We are going monomorphic, provided we actually have a JSFunction.
2274 __ JumpIfSmi(rdi, &miss);
2276 // Goto miss case if we do not have a function.
2277 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2278 __ j(not_equal, &miss);
2280 // Make sure the function is not the Array() function, which requires special
2281 // behavior on MISS.
2282 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2283 __ cmpp(rdi, rcx);
2284 __ j(equal, &miss);
2286 // Update stats.
2287 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));
2289 // Initialize the call counter.
2290 __ Move(FieldOperand(rbx, rdx, times_pointer_size,
2291 FixedArray::kHeaderSize + kPointerSize),
2292 Smi::FromInt(CallICNexus::kCallCountIncrement));
2294 // Store the function. Use a stub since we need a frame for allocation.
2295 // rbx - vector
2296 // rdx - slot (needs to be in smi form)
2297 // rdi - function
2298 {
2299 FrameScope scope(masm, StackFrame::INTERNAL);
2300 CreateWeakCellStub create_stub(isolate);
2302 __ Integer32ToSmi(rdx, rdx);
2303 __ Push(rdi);
2304 __ CallStub(&create_stub);
2305 __ Pop(rdi);
2306 }
2308 __ jmp(&have_js_function);
2310 // We are here because tracing is on or we encountered a MISS case we can't
2311 // handle here.
2312 __ bind(&miss);
2313 GenerateMiss(masm);
2315 // the slow case
2316 __ bind(&slow_start);
2317 // Check that function is not a smi.
2318 __ JumpIfSmi(rdi, &non_function);
2319 // Check that function is a JSFunction.
2320 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2321 __ j(not_equal, &slow);
2322 __ jmp(&have_js_function);
2323 }
2329 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2330 FrameScope scope(masm, StackFrame::INTERNAL);
2332 // Push the receiver and the function and feedback info.
2333 __ Push(rdi);
2334 __ Push(rbx);
2335 __ Integer32ToSmi(rdx, rdx);
2336 __ Push(rdx);
2338 // Call the entry.
2339 Runtime::FunctionId id = GetICState() == DEFAULT
2340 ? Runtime::kCallIC_Miss
2341 : Runtime::kCallIC_Customization_Miss;
2342 __ CallRuntime(id, 3);
2344 // Move result to rdi and exit the internal frame.
2345 __ movp(rdi, rax);
2346 }
2349 bool CEntryStub::NeedsImmovableCode() {
2350 return false;
2351 }
2354 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2355 CEntryStub::GenerateAheadOfTime(isolate);
2356 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2357 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2358 // It is important that the store buffer overflow stubs are generated first.
2359 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2360 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2361 CreateWeakCellStub::GenerateAheadOfTime(isolate);
2362 BinaryOpICStub::GenerateAheadOfTime(isolate);
2363 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2364 StoreFastElementStub::GenerateAheadOfTime(isolate);
2365 TypeofStub::GenerateAheadOfTime(isolate);
2366 }
2369 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2373 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2374 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2375 stub.GetCode();
2376 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
2377 save_doubles.GetCode();
2378 }
2381 void CEntryStub::Generate(MacroAssembler* masm) {
2382 // rax: number of arguments including receiver
2383 // rbx: pointer to C function (C callee-saved)
2384 // rbp: frame pointer of calling JS frame (restored after C call)
2385 // rsp: stack pointer (restored after C call)
2386 // rsi: current context (restored)
2388 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2390 // Enter the exit frame that transitions from JavaScript to C++.
2391 #ifdef _WIN64
2392 int arg_stack_space = (result_size() < 2 ? 2 : 4);
2393 #else // _WIN64
2394 int arg_stack_space = 0;
2395 #endif // _WIN64
2396 __ EnterExitFrame(arg_stack_space, save_doubles());
2398 // rbx: pointer to builtin function (C callee-saved).
2399 // rbp: frame pointer of exit frame (restored after C call).
2400 // rsp: stack pointer (restored after C call).
2401 // r14: number of arguments including receiver (C callee-saved).
2402 // r15: argv pointer (C callee-saved).
2404 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2405 // Complex results must be written to address passed as first argument.
2406 // AMD64 calling convention: a struct of two pointers in rax+rdx
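// Example: a runtime entry that returns two pointers (e.g. an ObjectPair)
// comes back in rax+rdx on AMD64, while on Win64 it is written to a
// caller-provided stack slot whose address is passed as the first argument.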
2408 // Check stack alignment.
2409 if (FLAG_debug_code) {
2410 __ CheckStackAlignment();
2411 }
2414 #ifdef _WIN64
2415 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2416 // Pass argv and argc as two parameters. The arguments object will
2417 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2418 if (result_size() < 2) {
2419 // Pass a pointer to the Arguments object as the first argument.
2420 // Return result in single register (rax).
2421 __ movp(rcx, r14); // argc.
2422 __ movp(rdx, r15); // argv.
2423 __ Move(r8, ExternalReference::isolate_address(isolate()));
2424 } else {
2425 DCHECK_EQ(2, result_size());
2426 // Pass a pointer to the result location as the first argument.
2427 __ leap(rcx, StackSpaceOperand(2));
2428 // Pass a pointer to the Arguments object as the second argument.
2429 __ movp(rdx, r14); // argc.
2430 __ movp(r8, r15); // argv.
2431 __ Move(r9, ExternalReference::isolate_address(isolate()));
2432 }
2434 #else // _WIN64
2435 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2436 __ movp(rdi, r14); // argc.
2437 __ movp(rsi, r15); // argv.
2438 __ Move(rdx, ExternalReference::isolate_address(isolate()));
2439 #endif // _WIN64
2440 __ call(rbx);
2441 // Result is in rax - do not destroy this register!
2443 #ifdef _WIN64
2444 // If return value is on the stack, pop it to registers.
2445 if (result_size() > 1) {
2446 DCHECK_EQ(2, result_size());
2447 // Read result values stored on stack. Result is stored
2448 // above the four argument mirror slots and the two
2449 // Arguments object slots.
2450 __ movq(rax, Operand(rsp, 6 * kRegisterSize));
2451 __ movq(rdx, Operand(rsp, 7 * kRegisterSize));
2452 }
2453 #endif // _WIN64
2455 // Check result for exception sentinel.
2456 Label exception_returned;
2457 __ CompareRoot(rax, Heap::kExceptionRootIndex);
2458 __ j(equal, &exception_returned);
2460 // Check that there is no pending exception, otherwise we
2461 // should have returned the exception sentinel.
2462 if (FLAG_debug_code) {
2463 Label okay;
2464 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
2465 ExternalReference pending_exception_address(
2466 Isolate::kPendingExceptionAddress, isolate());
2467 Operand pending_exception_operand =
2468 masm->ExternalOperand(pending_exception_address);
2469 __ cmpp(r14, pending_exception_operand);
2470 __ j(equal, &okay, Label::kNear);
2471 __ int3();
2472 __ bind(&okay);
2473 }
2475 // Exit the JavaScript to C++ exit frame.
2476 __ LeaveExitFrame(save_doubles());
2477 __ ret(0);
2479 // Handling of exception.
2480 __ bind(&exception_returned);
2482 ExternalReference pending_handler_context_address(
2483 Isolate::kPendingHandlerContextAddress, isolate());
2484 ExternalReference pending_handler_code_address(
2485 Isolate::kPendingHandlerCodeAddress, isolate());
2486 ExternalReference pending_handler_offset_address(
2487 Isolate::kPendingHandlerOffsetAddress, isolate());
2488 ExternalReference pending_handler_fp_address(
2489 Isolate::kPendingHandlerFPAddress, isolate());
2490 ExternalReference pending_handler_sp_address(
2491 Isolate::kPendingHandlerSPAddress, isolate());
2493 // Ask the runtime for help to determine the handler. This will set rax to
2494 // contain the current pending exception, don't clobber it.
2495 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
2496 isolate());
2497 {
2498 FrameScope scope(masm, StackFrame::MANUAL);
2499 __ movp(arg_reg_1, Immediate(0)); // argc.
2500 __ movp(arg_reg_2, Immediate(0)); // argv.
2501 __ Move(arg_reg_3, ExternalReference::isolate_address(isolate()));
2502 __ PrepareCallCFunction(3);
2503 __ CallCFunction(find_handler, 3);
2504 }
2506 // Retrieve the handler context, SP and FP.
2507 __ movp(rsi, masm->ExternalOperand(pending_handler_context_address));
2508 __ movp(rsp, masm->ExternalOperand(pending_handler_sp_address));
2509 __ movp(rbp, masm->ExternalOperand(pending_handler_fp_address));
2511 // If the handler is a JS frame, restore the context to the frame. Note that
2512 // the context will be set to (rsi == 0) for non-JS frames.
2513 Label skip;
2514 __ testp(rsi, rsi);
2515 __ j(zero, &skip, Label::kNear);
2516 __ movp(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
2517 __ bind(&skip);
2519 // Compute the handler entry address and jump to it.
2520 __ movp(rdi, masm->ExternalOperand(pending_handler_code_address));
2521 __ movp(rdx, masm->ExternalOperand(pending_handler_offset_address));
2522 __ leap(rdi, FieldOperand(rdi, rdx, times_1, Code::kHeaderSize));
2523 __ jmp(rdi);
2524 }
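// JSEntryStub builds the frame for the C++-to-JavaScript transition: it
// saves the host ABI's callee-saved registers, installs a stack handler,
// and calls through the JS entry trampoline builtin.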
2527 void JSEntryStub::Generate(MacroAssembler* masm) {
2528 Label invoke, handler_entry, exit;
2529 Label not_outermost_js, not_outermost_js_2;
2531 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2533 { // NOLINT. Scope block confuses linter.
2534 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
2535 // Set up frame.
2536 __ pushq(rbp);
2537 __ movp(rbp, rsp);
2539 // Push the stack frame type marker twice.
2540 int marker = type();
2541 // Scratch register is neither callee-save, nor an argument register on any
2542 // platform. It's free to use at this point.
2543 // Cannot use smi-register for loading yet.
2544 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2545 __ Push(kScratchRegister); // context slot
2546 __ Push(kScratchRegister); // function slot
2547 // Save callee-saved registers (X64/X32/Win64 calling conventions).
2548 __ pushq(r12);
2549 __ pushq(r13);
2550 __ pushq(r14);
2551 __ pushq(r15);
2552 #ifdef _WIN64
2553 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2554 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2555 #endif
2556 __ pushq(rbx);
2558 #ifdef _WIN64
2559 // On Win64 XMM6-XMM15 are callee-save
2560 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2561 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
2562 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
2563 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
2564 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
2565 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
2566 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
2567 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
2568 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
2569 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2570 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2571 #endif
2573 // Set up the roots and smi constant registers.
2574 // Needs to be done before any further smi loads.
2575 __ InitializeRootRegister();
2576 }
2578 // Save copies of the top frame descriptor on the stack.
2579 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2580 {
2581 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2582 __ Push(c_entry_fp_operand);
2583 }
2585 // If this is the outermost JS call, set js_entry_sp value.
2586 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2587 __ Load(rax, js_entry_sp);
2588 __ testp(rax, rax);
2589 __ j(not_zero, &not_outermost_js);
2590 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2591 __ movp(rax, rbp);
2592 __ Store(js_entry_sp, rax);
2593 Label cont;
2594 __ jmp(&cont);
2595 __ bind(&not_outermost_js);
2596 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
2597 __ bind(&cont);
2599 // Jump to a faked try block that does the invoke, with a faked catch
2600 // block that sets the pending exception.
2601 __ jmp(&invoke);
2602 __ bind(&handler_entry);
2603 handler_offset_ = handler_entry.pos();
2604 // Caught exception: Store result (exception) in the pending exception
2605 // field in the JSEnv and return a failure sentinel.
2606 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2607 isolate());
2608 __ Store(pending_exception, rax);
2609 __ LoadRoot(rax, Heap::kExceptionRootIndex);
2610 __ jmp(&exit);
2612 // Invoke: Link this frame into the handler chain.
2613 __ bind(&invoke);
2614 __ PushStackHandler();
2616 // Clear any pending exceptions.
2617 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2618 __ Store(pending_exception, rax);
2620 // Fake a receiver (NULL).
2621 __ Push(Immediate(0)); // receiver
2623 // Invoke the function by calling through JS entry trampoline builtin and
2624 // pop the faked function when we return. We load the address from an
2625 // external reference instead of inlining the call target address directly
2626 // in the code, because the builtin stubs may not have been generated yet
2627 // at the time this code is generated.
2628 if (type() == StackFrame::ENTRY_CONSTRUCT) {
2629 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2630 isolate());
2631 __ Load(rax, construct_entry);
2632 } else {
2633 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2634 __ Load(rax, entry);
2635 }
2636 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2637 __ call(kScratchRegister);
2639 // Unlink this frame from the handler chain.
2640 __ PopStackHandler();
2642 __ bind(&exit);
2643 // Check if the current stack frame is marked as the outermost JS frame.
2644 __ Pop(rbx);
2645 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2646 __ j(not_equal, &not_outermost_js_2);
2647 __ Move(kScratchRegister, js_entry_sp);
2648 __ movp(Operand(kScratchRegister, 0), Immediate(0));
2649 __ bind(&not_outermost_js_2);
2651 // Restore the top frame descriptor from the stack.
2652 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2653 __ Pop(c_entry_fp_operand);
2654 }
2656 // Restore callee-saved registers (X64 conventions).
2657 #ifdef _WIN64
2658 // On Win64 XMM6-XMM15 are callee-save
2659 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2660 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2661 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2662 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2663 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2664 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2665 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2666 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2667 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2668 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2669 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2670 #endif
2672 __ popq(rbx);
2673 #ifdef _WIN64
2674 // Callee saved in Win64 ABI, arguments/volatile in AMD64 ABI.
2675 __ popq(rsi);
2676 __ popq(rdi);
2677 #endif
2678 __ popq(r15);
2679 __ popq(r14);
2680 __ popq(r13);
2681 __ popq(r12);
2682 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
2684 // Restore frame pointer and return.
2685 __ popq(rbp);
2686 __ ret(0);
2687 }
2690 void InstanceOfStub::Generate(MacroAssembler* masm) {
2691 Register const object = rdx; // Object (lhs).
2692 Register const function = rax; // Function (rhs).
2693 Register const object_map = rcx; // Map of {object}.
2694 Register const function_map = r8; // Map of {function}.
2695 Register const function_prototype = rdi; // Prototype of {function}.
2697 DCHECK(object.is(InstanceOfDescriptor::LeftRegister()));
2698 DCHECK(function.is(InstanceOfDescriptor::RightRegister()));
2700 // Check if {object} is a smi.
2701 Label object_is_smi;
2702 __ JumpIfSmi(object, &object_is_smi, Label::kNear);
2704 // Lookup the {function} and the {object} map in the global instanceof cache.
2705 // Note: This is safe because we clear the global instanceof cache whenever
2706 // we change the prototype of any object.
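// Example: repeated `o instanceof F` checks with an unchanged map for {o}
// and the same {F} are answered from this cache without walking the
// prototype chain again.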
2707 Label fast_case, slow_case;
2708 __ movp(object_map, FieldOperand(object, HeapObject::kMapOffset));
2709 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2710 __ j(not_equal, &fast_case, Label::kNear);
2711 __ CompareRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
2712 __ j(not_equal, &fast_case, Label::kNear);
2713 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2714 __ ret(0);
2716 // If {object} is a smi we can safely return false if {function} is a JS
2717 // function, otherwise we have to miss to the runtime and throw an exception.
2718 __ bind(&object_is_smi);
2719 __ JumpIfSmi(function, &slow_case);
2720 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2721 __ j(not_equal, &slow_case);
2722 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2723 __ ret(0);
2725 // Fast-case: The {function} must be a valid JSFunction.
2726 __ bind(&fast_case);
2727 __ JumpIfSmi(function, &slow_case);
2728 __ CmpObjectType(function, JS_FUNCTION_TYPE, function_map);
2729 __ j(not_equal, &slow_case);
2731 // Ensure that {function} has an instance prototype.
2732 __ testb(FieldOperand(function_map, Map::kBitFieldOffset),
2733 Immediate(1 << Map::kHasNonInstancePrototype));
2734 __ j(not_zero, &slow_case);
2736 // Ensure that {function} is not bound.
2737 Register const shared_info = kScratchRegister;
2738 __ movp(shared_info,
2739 FieldOperand(function, JSFunction::kSharedFunctionInfoOffset));
2740 __ TestBitSharedFunctionInfoSpecialField(
2741 shared_info, SharedFunctionInfo::kCompilerHintsOffset,
2742 SharedFunctionInfo::kBoundFunction);
2743 __ j(not_zero, &slow_case);
2745 // Get the "prototype" (or initial map) of the {function}.
2746 __ movp(function_prototype,
2747 FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
2748 __ AssertNotSmi(function_prototype);
2750 // Resolve the prototype if the {function} has an initial map. Afterwards the
2751 // {function_prototype} will be either the JSReceiver prototype object or the
2752 // hole value, which means that no instances of the {function} were created so
2753 // far and hence we should return false.
2754 Label function_prototype_valid;
2755 Register const function_prototype_map = kScratchRegister;
2756 __ CmpObjectType(function_prototype, MAP_TYPE, function_prototype_map);
2757 __ j(not_equal, &function_prototype_valid, Label::kNear);
2758 __ movp(function_prototype,
2759 FieldOperand(function_prototype, Map::kPrototypeOffset));
2760 __ bind(&function_prototype_valid);
2761 __ AssertNotSmi(function_prototype);
2763 // Update the global instanceof cache with the current {object} map and
2764 // {function}. The cached answer will be set when it is known below.
2765 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2766 __ StoreRoot(object_map, Heap::kInstanceofCacheMapRootIndex);
2768 // Loop through the prototype chain looking for the {function} prototype.
2769 // Assume true, and change to false if not found.
2770 Register const object_prototype = object_map;
2771 Label done, loop;
2772 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2773 __ bind(&loop);
2774 __ movp(object_prototype, FieldOperand(object_map, Map::kPrototypeOffset));
2775 __ cmpp(object_prototype, function_prototype);
2776 __ j(equal, &done, Label::kNear);
2777 __ CompareRoot(object_prototype, Heap::kNullValueRootIndex);
2778 __ movp(object_map, FieldOperand(object_prototype, HeapObject::kMapOffset));
2779 __ j(not_equal, &loop);
2780 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2781 __ bind(&done);
2782 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2783 __ ret(0);
2785 // Slow-case: Call the runtime function.
2786 __ bind(&slow_case);
2787 __ PopReturnAddressTo(kScratchRegister);
2788 __ Push(object);
2789 __ Push(function);
2790 __ PushReturnAddressFrom(kScratchRegister);
2791 __ TailCallRuntime(Runtime::kInstanceOf, 2, 1);
2792 }
2795 // -------------------------------------------------------------------------
2796 // StringCharCodeAtGenerator
2798 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2799 // If the receiver is a smi trigger the non-string case.
2800 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2801 __ JumpIfSmi(object_, receiver_not_string_);
2803 // Fetch the instance type of the receiver into result register.
2804 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2805 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2806 // If the receiver is not a string trigger the non-string case.
2807 __ testb(result_, Immediate(kIsNotStringMask));
2808 __ j(not_zero, receiver_not_string_);
2809 }
2811 // If the index is non-smi trigger the non-smi case.
2812 __ JumpIfNotSmi(index_, &index_not_smi_);
2813 __ bind(&got_smi_index_);
2815 // Check for index out of range.
2816 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
2817 __ j(above_equal, index_out_of_range_);
2819 __ SmiToInteger32(index_, index_);
2821 StringCharLoadGenerator::Generate(
2822 masm, object_, index_, result_, &call_runtime_);
2824 __ Integer32ToSmi(result_, result_);
2825 __ bind(&exit_);
2826 }
2829 void StringCharCodeAtGenerator::GenerateSlow(
2830 MacroAssembler* masm, EmbedMode embed_mode,
2831 const RuntimeCallHelper& call_helper) {
2832 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2834 Factory* factory = masm->isolate()->factory();
2835 // Index is not a smi.
2836 __ bind(&index_not_smi_);
2837 // If index is a heap number, try converting it to an integer.
2838 __ CheckMap(index_,
2839 factory->heap_number_map(),
2840 index_not_number_,
2841 DONT_DO_SMI_CHECK);
2842 call_helper.BeforeCall(masm);
2843 if (embed_mode == PART_OF_IC_HANDLER) {
2844 __ Push(LoadWithVectorDescriptor::VectorRegister());
2845 __ Push(LoadDescriptor::SlotRegister());
2846 }
2847 __ Push(object_);
2848 __ Push(index_); // Consumed by runtime conversion function.
2849 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2850 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2851 } else {
2852 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2853 // NumberToSmi discards numbers that are not exact integers.
2854 __ CallRuntime(Runtime::kNumberToSmi, 1);
2855 }
2856 if (!index_.is(rax)) {
2857 // Save the conversion result before the pop instructions below
2858 // have a chance to overwrite it.
2859 __ movp(index_, rax);
2860 }
2861 __ Pop(object_);
2862 if (embed_mode == PART_OF_IC_HANDLER) {
2863 __ Pop(LoadDescriptor::SlotRegister());
2864 __ Pop(LoadWithVectorDescriptor::VectorRegister());
2865 }
2866 // Reload the instance type.
2867 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2868 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2869 call_helper.AfterCall(masm);
2870 // If index is still not a smi, it must be out of range.
2871 __ JumpIfNotSmi(index_, index_out_of_range_);
2872 // Otherwise, return to the fast path.
2873 __ jmp(&got_smi_index_);
2875 // Call runtime. We get here when the receiver is a string and the
2876 // index is a number, but the code of getting the actual character
2877 // is too complex (e.g., when the string needs to be flattened).
2878 __ bind(&call_runtime_);
2879 call_helper.BeforeCall(masm);
2880 __ Push(object_);
2881 __ Integer32ToSmi(index_, index_);
2882 __ Push(index_);
2883 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
2884 if (!result_.is(rax)) {
2885 __ movp(result_, rax);
2886 }
2887 call_helper.AfterCall(masm);
2888 __ jmp(&exit_);
2890 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2894 // -------------------------------------------------------------------------
2895 // StringCharFromCodeGenerator
2897 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2898 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2899 __ JumpIfNotSmi(code_, &slow_case_);
2900 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
2901 __ j(above, &slow_case_);
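// Look the code up in the single-character string cache; a hit (e.g. code
// 0x41 yields the cached one-byte string "A") avoids the runtime call,
// while a cached undefined sends us to the slow case to materialize it.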
2903 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
2904 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
2905 __ movp(result_, FieldOperand(result_, index.reg, index.scale,
2906 FixedArray::kHeaderSize));
2907 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
2908 __ j(equal, &slow_case_);
2909 __ bind(&exit_);
2910 }
2913 void StringCharFromCodeGenerator::GenerateSlow(
2914 MacroAssembler* masm,
2915 const RuntimeCallHelper& call_helper) {
2916 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2918 __ bind(&slow_case_);
2919 call_helper.BeforeCall(masm);
2920 __ Push(code_);
2921 __ CallRuntime(Runtime::kCharFromCode, 1);
2922 if (!result_.is(rax)) {
2923 __ movp(result_, rax);
2924 }
2925 call_helper.AfterCall(masm);
2926 __ jmp(&exit_);
2928 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2932 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2933 Register dest,
2934 Register src,
2935 Register count,
2936 String::Encoding encoding) {
2937 // Nothing to do for zero characters.
2938 Label done;
2939 __ testl(count, count);
2940 __ j(zero, &done, Label::kNear);
2942 // Make count the number of bytes to copy.
2943 if (encoding == String::TWO_BYTE_ENCODING) {
2944 STATIC_ASSERT(2 == sizeof(uc16));
2945 __ addl(count, count);
2946 }
2948 // Copy remaining characters.
2949 Label loop;
2950 __ bind(&loop);
2951 __ movb(kScratchRegister, Operand(src, 0));
2952 __ movb(Operand(dest, 0), kScratchRegister);
2953 __ incp(src);
2954 __ incp(dest);
2955 __ decl(count);
2956 __ j(not_zero, &loop);
2958 __ bind(&done);
2959 }
2962 void SubStringStub::Generate(MacroAssembler* masm) {
2963 Label runtime;
2965 // Stack frame on entry.
2966 // rsp[0] : return address
2967 // rsp[8] : to
2968 // rsp[16] : from
2969 // rsp[24] : string
2971 enum SubStringStubArgumentIndices {
2972 STRING_ARGUMENT_INDEX,
2973 FROM_ARGUMENT_INDEX,
2974 TO_ARGUMENT_INDEX,
2975 SUB_STRING_ARGUMENT_COUNT
2976 };
2978 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
2979 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2981 // Make sure first argument is a string.
2982 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
2983 STATIC_ASSERT(kSmiTag == 0);
2984 __ testl(rax, Immediate(kSmiTagMask));
2985 __ j(zero, &runtime);
2986 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
2987 __ j(NegateCondition(is_string), &runtime);
2989 // rax: string
2990 // rbx: instance type
2991 // Calculate length of sub string using the smi values.
2992 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
2993 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
2994 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
2996 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
2997 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
2998 Label not_original_string;
2999 // Shorter than original string's length: an actual substring.
3000 __ j(below, ¬_original_string, Label::kNear);
3001 // Longer than original string's length or negative: unsafe arguments.
3002 __ j(above, &runtime);
3003 // Return original string.
3004 Counters* counters = isolate()->counters();
3005 __ IncrementCounter(counters->sub_string_native(), 1);
3006 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3007 __ bind(¬_original_string);
3009 Label single_char;
3010 __ SmiCompare(rcx, Smi::FromInt(1));
3011 __ j(equal, &single_char);
3013 __ SmiToInteger32(rcx, rcx);
3015 // rax: string
3016 // rbx: instance type
3017 // rcx: sub string length
3018 // rdx: from index (smi)
3019 // Deal with different string types: update the index if necessary
3020 // and put the underlying string into edi.
3021 Label underlying_unpacked, sliced_string, seq_or_external_string;
3022 // If the string is not indirect, it can only be sequential or external.
3023 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3024 STATIC_ASSERT(kIsIndirectStringMask != 0);
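// Dispatch overview: indirect strings are unpacked first (a cons string
// yields its first part, a sliced string yields its parent plus an offset);
// the result is then a sequential or external string handled below.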
3025 __ testb(rbx, Immediate(kIsIndirectStringMask));
3026 __ j(zero, &seq_or_external_string, Label::kNear);
3028 __ testb(rbx, Immediate(kSlicedNotConsMask));
3029 __ j(not_zero, &sliced_string, Label::kNear);
3030 // Cons string. Check whether it is flat, then fetch first part.
3031 // Flat cons strings have an empty second part.
3032 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
3033 Heap::kempty_stringRootIndex);
3034 __ j(not_equal, &runtime);
3035 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
3036 // Update instance type.
3037 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
3038 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3039 __ jmp(&underlying_unpacked, Label::kNear);
3041 __ bind(&sliced_string);
3042 // Sliced string. Fetch parent and correct start index by offset.
3043 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
3044 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
3045 // Update instance type.
3046 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
3047 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3048 __ jmp(&underlying_unpacked, Label::kNear);
3050 __ bind(&seq_or_external_string);
3051 // Sequential or external string. Just move string to the correct register.
3052 __ movp(rdi, rax);
3054 __ bind(&underlying_unpacked);
3056 if (FLAG_string_slices) {
3057 Label copy_routine;
3058 // rdi: underlying subject string
3059 // rbx: instance type of underlying subject string
3060 // rdx: adjusted start index (smi)
3061 // rcx: length
3062 // If coming from the make_two_character_string path, the string
3063 // is too short to be sliced anyway.
3064 __ cmpp(rcx, Immediate(SlicedString::kMinLength));
3065 // Short slice. Copy instead of slicing.
3066 __ j(less, &copy_routine);
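// Example: a substring shorter than SlicedString::kMinLength is copied
// character by character, while a longer one shares the parent's characters
// through a SlicedString header.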
3067 // Allocate new sliced string. At this point we do not reload the instance
3068 // type including the string encoding because we simply rely on the info
3069 // provided by the original string. It does not matter if the original
3070 // string's encoding is wrong because we always have to recheck encoding of
3071 // the newly created string's parent anyways due to externalized strings.
3072 Label two_byte_slice, set_slice_header;
3073 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3074 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3075 __ testb(rbx, Immediate(kStringEncodingMask));
3076 __ j(zero, &two_byte_slice, Label::kNear);
3077 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
3078 __ jmp(&set_slice_header, Label::kNear);
3079 __ bind(&two_byte_slice);
3080 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
3081 __ bind(&set_slice_header);
3082 __ Integer32ToSmi(rcx, rcx);
3083 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
3084 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
3085 Immediate(String::kEmptyHashField));
3086 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
3087 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
3088 __ IncrementCounter(counters->sub_string_native(), 1);
3089 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3091 __ bind(&copy_routine);
3092 }
3094 // rdi: underlying subject string
3095 // rbx: instance type of underlying subject string
3096 // rdx: adjusted start index (smi)
3098 // The subject string can only be external or sequential string of either
3099 // encoding at this point.
3100 Label two_byte_sequential, sequential_string;
3101 STATIC_ASSERT(kExternalStringTag != 0);
3102 STATIC_ASSERT(kSeqStringTag == 0);
3103 __ testb(rbx, Immediate(kExternalStringTag));
3104 __ j(zero, &sequential_string);
3106 // Handle external string.
3107 // Rule out short external strings.
3108 STATIC_ASSERT(kShortExternalStringTag != 0);
3109 __ testb(rbx, Immediate(kShortExternalStringMask));
3110 __ j(not_zero, &runtime);
3111 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
3112 // Move the pointer so that offset-wise, it looks like a sequential string.
3113 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3114 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3116 __ bind(&sequential_string);
3117 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3118 __ testb(rbx, Immediate(kStringEncodingMask));
3119 __ j(zero, &two_byte_sequential);
3121 // Allocate the result.
3122 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
3124 // rax: result string
3125 // rcx: result string length
3126 { // Locate character of sub string start.
3127 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
3128 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3129 SeqOneByteString::kHeaderSize - kHeapObjectTag));
3130 }
3131 // Locate first character of result.
3132 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3134 // rax: result string
3135 // rcx: result length
3136 // rdi: first character of result
3137 // r14: character of sub string start
3138 StringHelper::GenerateCopyCharacters(
3139 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
3140 __ IncrementCounter(counters->sub_string_native(), 1);
3141 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3143 __ bind(&two_byte_sequential);
3144 // Allocate the result.
3145 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
3147 // rax: result string
3148 // rcx: result string length
3149 { // Locate character of sub string start.
3150 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
3151 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3152 SeqOneByteString::kHeaderSize - kHeapObjectTag));
3153 }
3154 // Locate first character of result.
3155 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
3157 // rax: result string
3158 // rcx: result length
3159 // rdi: first character of result
3160 // r14: character of sub string start
3161 StringHelper::GenerateCopyCharacters(
3162 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
3163 __ IncrementCounter(counters->sub_string_native(), 1);
3164 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3166 // Just jump to runtime to create the sub string.
3167 __ bind(&runtime);
3168 __ TailCallRuntime(Runtime::kSubString, 3, 1);
3170 __ bind(&single_char);
3171 // rax: string
3172 // rbx: instance type
3173 // rcx: sub string length (smi)
3174 // rdx: from index (smi)
3175 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
3176 &runtime, STRING_INDEX_IS_NUMBER,
3177 RECEIVER_IS_STRING);
3178 generator.GenerateFast(masm);
3179 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3180 generator.SkipSlow(masm, &runtime);
3181 }
3184 void ToNumberStub::Generate(MacroAssembler* masm) {
3185 // The ToNumber stub takes one argument in rax.
3186 Label not_smi;
3187 __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
3188 __ Ret();
3189 __ bind(&not_smi);
3191 Label not_heap_number;
3192 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
3193 Heap::kHeapNumberMapRootIndex);
3194 __ j(not_equal, &not_heap_number, Label::kNear);
3195 __ Ret();
3196 __ bind(&not_heap_number);
3198 Label not_string, slow_string;
3199 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
3200 // rax: object
3201 // rdi: object map
3202 __ j(above_equal, &not_string, Label::kNear);
3203 // Check if string has a cached array index.
3204 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3205 Immediate(String::kContainsCachedArrayIndexMask));
3206 __ j(not_zero, &slow_string, Label::kNear);
3207 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3208 __ IndexFromHash(rax, rax);
3209 __ Ret();
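// Example: ToNumber("7") on a string with a cached array index reads the
// numeric value straight out of the hash field; all other strings take the
// runtime path below.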
3210 __ bind(&slow_string);
3211 __ PopReturnAddressTo(rcx); // Pop return address.
3212 __ Push(rax); // Push argument.
3213 __ PushReturnAddressFrom(rcx); // Push return address.
3214 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
3215 __ bind(&not_string);
3217 Label not_oddball;
3218 __ CmpInstanceType(rdi, ODDBALL_TYPE);
3219 __ j(not_equal, &not_oddball, Label::kNear);
3220 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
3221 __ Ret();
3222 __ bind(&not_oddball);
3224 __ PopReturnAddressTo(rcx); // Pop return address.
3225 __ Push(rax); // Push argument.
3226 __ PushReturnAddressFrom(rcx); // Push return address.
3227 __ InvokeBuiltin(Context::TO_NUMBER_BUILTIN_INDEX, JUMP_FUNCTION);
3228 }
3231 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
3232 Register left,
3233 Register right,
3234 Register scratch1,
3235 Register scratch2) {
3236 Register length = scratch1;
3238 // Compare lengths.
3239 Label check_zero_length;
3240 __ movp(length, FieldOperand(left, String::kLengthOffset));
3241 __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
3242 __ j(equal, &check_zero_length, Label::kNear);
3243 __ Move(rax, Smi::FromInt(NOT_EQUAL));
3246 // Check if the length is zero.
3247 Label compare_chars;
3248 __ bind(&check_zero_length);
3249 STATIC_ASSERT(kSmiTag == 0);
3251 __ j(not_zero, &compare_chars, Label::kNear);
3252 __ Move(rax, Smi::FromInt(EQUAL));
3255 // Compare characters.
3256 __ bind(&compare_chars);
3257 Label strings_not_equal;
3258 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
3259 &strings_not_equal, Label::kNear);
3261 // Characters are equal.
3262 __ Move(rax, Smi::FromInt(EQUAL));
3265 // Characters are not equal.
3266 __ bind(&strings_not_equal);
3267 __ Move(rax, Smi::FromInt(NOT_EQUAL));

void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}

void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}
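
// Illustrative C sketch of the loop above (not emitted code): biasing both
// string pointers by +length and running the index from -length up to 0
// lets the incq set ZF exactly when the characters are exhausted, so no
// separate bounds compare is needed:
//   const uint8_t* lp = left_chars + n;
//   const uint8_t* rp = right_chars + n;
//   for (intptr_t i = -n; i != 0; i++) {
//     if (lp[i] != rp[i]) goto chars_not_equal;
//   }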

void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : right string
  //  rsp[16] : left string

  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(0));  // left
  __ movp(rax, args.GetArgumentOperand(1));  // right

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential one-byte strings.
  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of one-byte strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, Immediate(2 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi,
                                                  r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}

void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}

void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Performing mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);     // Subtract one if below (aka. carry set).
  __ ret(0);

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), strength(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}
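
// The movl/setcc/sbbp sequence above computes -1/0/1 branchlessly from the
// flags left by ucomisd (a sketch of the three cases):
//   above (CF=0, ZF=0): setcc -> rax = 1, sbb subtracts 0  -> rax =  1
//   equal (CF=0, ZF=1): setcc -> rax = 0, sbb subtracts 0  -> rax =  0
//   below (CF=1):       setcc -> rax = 0, sbb subtracts CF -> rax = -1
// Plain mov/xor of zeros would clobber EFLAGS, hence the movl immediates.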

void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
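
// Internalized strings are unique: equal contents imply the same object, so
// the cmpp above is a complete equality check. On inequality rax still holds
// the right operand (a non-zero tagged pointer), which the caller treats as
// a NOT_EQUAL result; on equality rax is overwritten with Smi 0 (EQUAL).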

void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateObjects(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::OBJECT);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  DCHECK(GetCondition() == equal);
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ GetWeakValue(rdi, cell);
  __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ cmpp(rcx, rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(rbx, rdi);
  __ j(not_equal, &miss, Label::kNear);

  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}

void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallRuntime(Runtime::kCompareIC_Miss, 3);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}
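
// Sketch of the miss protocol above: the operands are pushed twice because
// Runtime::kCompareIC_Miss consumes one (rdx, rax, op) triple as arguments
// while the outer pair is popped back into registers afterwards. The runtime
// call returns the recompiled stub's Code object in rax, whose entry point
// (header offset) is then tail-called with the original operands restored.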

void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    STATIC_ASSERT(kSmiTagSize == 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
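
// Two idioms worth noting above (illustrative): the dictionary is probed
// quadratically, so the i-th masked index is (hash + GetProbeOffset(i)) &
// (capacity - 1), and with a power-of-two capacity that sequence eventually
// reaches every slot. And since kEntrySize == 3, the multiply-by-3 is done
// without imul as index = index + index * 2 via a single leap.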

// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}

void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    STATIC_ASSERT(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bailout as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}
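
// Note: this out-of-line loop continues where the inlined probes left off
// (kInlinedProbes..kTotalProbes-1). The capacity mask is kept at rsp[0]
// (pushed above, dropped before each return) and the key and its hash are
// read back from the caller's stack slots, which is why the stub can run
// without a frame using only result()/index() as scratch registers.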

void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}

// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
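
// Patching sketch: the stub starts with a 2-byte near jmp followed by a
// 5-byte far jmp. The set_byte_at calls rewrite their opcodes into
// same-length compare-immediate instructions that behave as nops, so in the
// initial STORE_BUFFER_ONLY mode both "jumps" fall through; activating
// incremental marking patches the jmp opcodes back in (see
// RecordWriteStub::Patch).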

void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}

void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}

void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: if it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}

void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  // clobbers rbx, rdx, rdi
  // -----------------------------------

  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(1));
  __ movp(rbx, args.GetArgumentOperand(0));
  __ movp(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Storing into the array literal requires an elements transition. Call
  // into the runtime.

  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  __ Push(rbx);
  __ Push(rcx);
  __ Push(rax);
  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ leap(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                            FixedArrayBase::kHeaderSize));
  __ movp(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movp(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movp(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}

void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}

void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
  LoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, LoadWithVectorDescriptor::VectorRegister());
  KeyedLoadICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}

static void HandleArrayCases(MacroAssembler* masm, Register receiver,
                             Register key, Register vector, Register slot,
                             Register feedback, Register receiver_map,
                             Register scratch1, Register scratch2,
                             Register scratch3, bool is_polymorphic,
                             Label* miss) {
  // feedback initially contains the feedback array
  Label next_loop, prepare_next;
  Label start_polymorphic;

  Register counter = scratch1;
  Register length = scratch2;
  Register cached_map = scratch3;

  __ movp(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &start_polymorphic);

  // found, now call handler.
  Register handler = feedback;
  __ movp(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  // Polymorphic, we have to loop from 2 to N
  __ bind(&start_polymorphic);
  __ SmiToInteger32(length, FieldOperand(feedback, FixedArray::kLengthOffset));
  if (!is_polymorphic) {
    // If the IC could be monomorphic we have to make sure we don't go past the
    // end of the feedback array.
    __ cmpl(length, Immediate(2));
    __ j(equal, miss);
  }
  __ movl(counter, Immediate(2));

  __ bind(&next_loop);
  __ movp(cached_map, FieldOperand(feedback, counter, times_pointer_size,
                                   FixedArray::kHeaderSize));
  __ cmpp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
  __ j(not_equal, &prepare_next);
  __ movp(handler, FieldOperand(feedback, counter, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);

  __ bind(&prepare_next);
  __ addl(counter, Immediate(2));
  __ cmpl(counter, length);
  __ j(less, &next_loop);

  // We exhausted our array of map handler pairs.
  __ jmp(miss);
}
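
// Feedback array layout assumed by the loop above (illustrative):
//   [0]: WeakCell(map_0)  [1]: handler_0
//   [2]: WeakCell(map_1)  [3]: handler_1  ...
// Map/handler entries come in pairs, which is why the counter starts at 2
// (the pair at 0/1 was already checked) and advances by 2 per iteration.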

static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
                                  Register receiver_map, Register feedback,
                                  Register vector, Register integer_slot,
                                  Label* compare_map, Label* load_smi_map,
                                  Label* try_array) {
  __ JumpIfSmi(receiver, load_smi_map);
  __ movp(receiver_map, FieldOperand(receiver, 0));

  __ bind(compare_map);
  __ cmpp(receiver_map, FieldOperand(feedback, WeakCell::kValueOffset));
  __ j(not_equal, try_array);
  Register handler = feedback;
  __ movp(handler, FieldOperand(vector, integer_slot, times_pointer_size,
                                FixedArray::kHeaderSize + kPointerSize));
  __ leap(handler, FieldOperand(handler, Code::kHeaderSize));
  __ jmp(handler);
}
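
// Monomorphic fast-path sketch: the feedback slot is speculatively treated
// as a WeakCell; if its value matches the receiver map, the code object in
// the following vector slot is tail-called. If the feedback is actually a
// FixedArray or a symbol, the map compare simply fails and control continues
// at try_array, so no explicit type check of the feedback is needed first.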

void LoadICStub::Generate(MacroAssembler* masm) { GenerateImpl(masm, false); }


void LoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void LoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register name = LoadWithVectorDescriptor::NameRegister();          // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  // Is it a fixed array?
  __ bind(&try_array);
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);
  HandleArrayCases(masm, receiver, name, vector, slot, feedback, receiver_map,
                   integer_slot, r11, r15, true, &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &miss);
  Code::Flags code_flags = Code::RemoveTypeAndHolderFromFlags(
      Code::ComputeHandlerFlags(Code::LOAD_IC));
  masm->isolate()->stub_cache()->GenerateProbe(
      masm, Code::LOAD_IC, code_flags, receiver, name, feedback, no_reg);

  __ bind(&miss);
  LoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void KeyedLoadICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void KeyedLoadICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void KeyedLoadICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Register receiver = LoadWithVectorDescriptor::ReceiverRegister();  // rdx
  Register key = LoadWithVectorDescriptor::NameRegister();           // rcx
  Register vector = LoadWithVectorDescriptor::VectorRegister();      // rbx
  Register slot = LoadWithVectorDescriptor::SlotRegister();          // rax
  Register feedback = rdi;
  Register integer_slot = r8;
  Register receiver_map = r9;

  __ SmiToInteger32(integer_slot, slot);
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize));

  // Try to quickly handle the monomorphic case without knowing for sure
  // if we have a weak cell in feedback. We do know it's safe to look
  // at WeakCell::kValueOffset.
  Label try_array, load_smi_map, compare_map;
  Label not_array, miss;
  HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector,
                        integer_slot, &compare_map, &load_smi_map, &try_array);

  __ bind(&try_array);
  // Is it a fixed array?
  __ CompareRoot(FieldOperand(feedback, 0), Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &not_array);

  // We have a polymorphic element handler.
  Label polymorphic, try_poly_name;
  __ bind(&polymorphic);
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map,
                   integer_slot, r11, r15, true, &miss);

  __ bind(&not_array);
  __ CompareRoot(feedback, Heap::kmegamorphic_symbolRootIndex);
  __ j(not_equal, &try_poly_name);
  Handle<Code> megamorphic_stub =
      KeyedLoadIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
  __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);

  __ bind(&try_poly_name);
  // We might have a name in feedback, and a fixed array in the next slot.
  __ cmpp(key, feedback);
  __ j(not_equal, &miss);
  // If the name comparison succeeded, we know we have a fixed array with
  // at least one map/handler pair.
  __ movp(feedback, FieldOperand(vector, integer_slot, times_pointer_size,
                                 FixedArray::kHeaderSize + kPointerSize));
  HandleArrayCases(masm, receiver, key, vector, slot, feedback, receiver_map,
                   integer_slot, r11, r15, false, &miss);

  __ bind(&miss);
  KeyedLoadIC::GenerateMiss(masm);

  __ bind(&load_smi_map);
  __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
  __ jmp(&compare_map);
}

void VectorStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
  VectorStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorKeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorStoreICDescriptor::VectorRegister());
  VectorKeyedStoreICStub stub(isolate(), state());
  stub.GenerateForTrampoline(masm);
}


void VectorStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void VectorStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
  Label miss;

  // TODO(mvstanton): Implement.
  __ bind(&miss);
  StoreIC::GenerateMiss(masm);
}


void VectorKeyedStoreICStub::Generate(MacroAssembler* masm) {
  GenerateImpl(masm, false);
}


void VectorKeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
  GenerateImpl(masm, true);
}


void VectorKeyedStoreICStub::GenerateImpl(MacroAssembler* masm,
                                          bool in_frame) {
  Label miss;

  // TODO(mvstanton): Implement.
  __ bind(&miss);
  KeyedStoreIC::GenerateMiss(masm);
}

void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_ArrayStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address to the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Restore the remaining saved registers and return.
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);
  __ Ret();
}

template <class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
    STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
    STATIC_ASSERT(FAST_ELEMENTS == 2);
    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
    STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
    STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store the
    // kind in the AllocationSite::transition_info field because elements kind
    // is restricted to a portion of the field; upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
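
// Kind-bump sketch: the STATIC_ASSERTs above pin the fast ElementsKind
// numbering so that every holey kind is odd and exactly one above its packed
// counterpart (e.g. FAST_SMI_ELEMENTS == 0 -> FAST_HOLEY_SMI_ELEMENTS == 1).
// That is why testing the low bit of rdx detects holeyness and a single
// increment converts a packed kind into the matching holey kind.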

template <class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : original constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  __ Pop(rcx);  // return address.
  __ Push(rdi);
  __ Push(rdx);

  // Adjust argc for the two extra arguments pushed above.
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE:
      __ addp(rax, Immediate(2));
      break;
    case NONE:
      __ movp(rax, Immediate(2));
      break;
    case ONE:
      __ movp(rax, Immediate(3));
      break;
  }

  __ Push(rcx);
  __ JumpToExternalReference(
      ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()),
      1);
}

void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}

void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}

void LoadGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register result_reg = rax;
  Label slow_case;

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell value at the specified slot.
  __ movp(result_reg, ContextOperand(context_reg, slot_reg));
  __ movp(result_reg, FieldOperand(result_reg, PropertyCell::kValueOffset));

  // Check that value is not the_hole.
  __ CompareRoot(result_reg, Heap::kTheHoleValueRootIndex);
  __ j(equal, &slow_case, Label::kNear);
  __ Ret();

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(Runtime::kLoadGlobalViaContext, 1, 1);
}

void StoreGlobalViaContextStub::Generate(MacroAssembler* masm) {
  Register context_reg = rsi;
  Register slot_reg = rbx;
  Register value_reg = rax;
  Register cell_reg = r8;
  Register cell_details_reg = rdx;
  Register cell_value_reg = r9;
  Label fast_heapobject_case, fast_smi_case, slow_case;

  if (FLAG_debug_code) {
    __ CompareRoot(value_reg, Heap::kTheHoleValueRootIndex);
    __ Check(not_equal, kUnexpectedValue);
  }

  // Go up context chain to the script context.
  for (int i = 0; i < depth(); ++i) {
    __ movp(rdi, ContextOperand(context_reg, Context::PREVIOUS_INDEX));
    context_reg = rdi;
  }

  // Load the PropertyCell at the specified slot.
  __ movp(cell_reg, ContextOperand(context_reg, slot_reg));

  // Load PropertyDetails for the cell (actually only the cell_type, kind and
  // READ_ONLY bit of attributes).
  __ SmiToInteger32(cell_details_reg,
                    FieldOperand(cell_reg, PropertyCell::kDetailsOffset));
  __ andl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::kMask |
                    PropertyDetails::KindField::kMask |
                    PropertyDetails::kAttributesReadOnlyMask));

  // Check if PropertyCell holds mutable data.
  Label not_mutable_data;
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kMutable) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &not_mutable_data);
  __ JumpIfSmi(value_reg, &fast_smi_case);
  __ bind(&fast_heapobject_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ RecordWriteField(cell_reg, PropertyCell::kValueOffset, value_reg,
                      cell_value_reg, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
                      OMIT_SMI_CHECK);
  // RecordWriteField clobbers the value register, so we need to reload.
  __ movp(value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ Ret();
  __ bind(&not_mutable_data);

  // Check if PropertyCell value matches the new value (relevant for Constant,
  // ConstantType and Undefined cells).
  Label not_same_value;
  __ movp(cell_value_reg, FieldOperand(cell_reg, PropertyCell::kValueOffset));
  __ cmpp(cell_value_reg, value_reg);
  __ j(not_equal, &not_same_value,
       FLAG_debug_code ? Label::kFar : Label::kNear);
  // Make sure the PropertyCell is not marked READ_ONLY.
  __ testl(cell_details_reg,
           Immediate(PropertyDetails::kAttributesReadOnlyMask));
  __ j(not_zero, &slow_case);
  if (FLAG_debug_code) {
    Label done;
    // This can only be true for Constant, ConstantType and Undefined cells,
    // because we never store the_hole via this stub.
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstant) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kConstantType) |
                      PropertyDetails::KindField::encode(kData)));
    __ j(equal, &done);
    __ cmpl(cell_details_reg,
            Immediate(PropertyDetails::PropertyCellTypeField::encode(
                          PropertyCellType::kUndefined) |
                      PropertyDetails::KindField::encode(kData)));
    __ Check(equal, kUnexpectedValue);
    __ bind(&done);
  }
  __ Ret();
  __ bind(&not_same_value);

  // Check if PropertyCell contains data with constant type (and is not
  // READ_ONLY).
  __ cmpl(cell_details_reg,
          Immediate(PropertyDetails::PropertyCellTypeField::encode(
                        PropertyCellType::kConstantType) |
                    PropertyDetails::KindField::encode(kData)));
  __ j(not_equal, &slow_case, Label::kNear);

  // Now either both old and new values must be SMIs or both must be heap
  // objects with same map.
  Label value_is_heap_object;
  __ JumpIfNotSmi(value_reg, &value_is_heap_object, Label::kNear);
  __ JumpIfNotSmi(cell_value_reg, &slow_case, Label::kNear);
  // Old and new values are SMIs, no need for a write barrier here.
  __ bind(&fast_smi_case);
  __ movp(FieldOperand(cell_reg, PropertyCell::kValueOffset), value_reg);
  __ Ret();
  __ bind(&value_is_heap_object);
  __ JumpIfSmi(cell_value_reg, &slow_case, Label::kNear);
  Register cell_value_map_reg = cell_value_reg;
  __ movp(cell_value_map_reg,
          FieldOperand(cell_value_reg, HeapObject::kMapOffset));
  __ cmpp(cell_value_map_reg, FieldOperand(value_reg, HeapObject::kMapOffset));
  __ j(equal, &fast_heapobject_case);

  // Fallback to the runtime.
  __ bind(&slow_case);
  __ Integer32ToSmi(slot_reg, slot_reg);
  __ PopReturnAddressTo(kScratchRegister);
  __ Push(slot_reg);
  __ Push(value_reg);
  __ Push(kScratchRegister);
  __ TailCallRuntime(is_strict(language_mode())
                         ? Runtime::kStoreGlobalViaContext_Strict
                         : Runtime::kStoreGlobalViaContext_Sloppy,
                     2, 1);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}
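
// For example (hypothetical addresses, for illustration only): if the
// handle-scope "next" field lives at 0x1000 and the "limit" field at 0x1008,
// Offset(limit_ref, next_ref) returns 8, so once base_reg points at the
// "next" field the limit is addressable as Operand(base_reg, 8).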

// Prepares the stack to pass arguments (aligns the stack and so on). The
// WIN64 calling convention requires the pointer to the return value slot to
// be put into rcx (rcx must be preserved until CallApiFunctionAndReturn).
// Saves context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed), accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}
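
// A minimal usage sketch (illustrative only; the real call sites are the
// stubs further below):
//   PrepareCallApiFunction(masm, /* arg_stack_space */ 1);
//   __ movp(StackSpaceOperand(0), some_reg);  // fill the reserved slot
//   CallApiFunctionAndReturn(masm, function_address, thunk_ref, last_arg,
//                            kStackSpace, nullptr, return_value_operand,
//                            nullptr);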


// Calls an API function. Allocates a HandleScope, extracts the returned value
// from the handle, and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize from the stack (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));
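
  // The three instructions above mirror what a v8::HandleScope does on entry
  // (a sketch of the idea, not the actual C++ implementation):
  //   prev_next = data->next;    // saved in prev_next_address_reg
  //   prev_limit = data->limit;  // saved in prev_limit_reg
  //   data->level++;
  // The epilogue below undoes this and calls the extension-deletion helper
  // if the limit changed while the callback ran.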

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // The profiler is disabled: call the api function directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  __ movp(rax, return_value_operand);

  // No more valid handles (the result handle was the last one). Restore the
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);

  // Check if the function scheduled an exception.
  __ Move(rdi, scheduled_exception_address);
  __ Cmp(Operand(rdi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }
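
  // Note: in the dynamic-argc case the number of bytes to drop was computed
  // by the caller and stashed in the frame (see the is_construct_call_ slot
  // below); it was loaded into rbx via stack_space_operand before leaving
  // the exit frame.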

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException, 0, 1);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}


static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined) {
  // ----------- S t a t e -------------
  //  -- rdi                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  -- rax                 : number of arguments if argc is a register
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);
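
  // Layout reminder: after the pushes below (and immediately before the
  // return address is pushed back), the FunctionCallbackArguments block sits
  // on top of the JS arguments, with holder closest to rsp:
  //   rsp[0*8] : holder               (FCA::kHolderIndex == 0)
  //   rsp[1*8] : isolate              (FCA::kIsolateIndex == 1)
  //   rsp[2*8] : return value default
  //   rsp[3*8] : return value
  //   rsp[4*8] : call data
  //   rsp[5*8] : callee
  //   rsp[6*8] : context save         (FCA::kContextSaveIndex == 6)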

  DCHECK(argc.is_immediate() || rax.is(argc.reg()));

  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);

  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);

  // load context from callee
  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));

  // Allocate the v8::Arguments structure in the arguments' space, since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  if (argc.is_immediate()) {
    __ addp(scratch, Immediate((argc.immediate() + FCA::kArgsLength - 1) *
                               kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ Set(StackSpaceOperand(2), argc.immediate());
    // FunctionCallbackInfo::is_construct_call_.
    __ Set(StackSpaceOperand(3), 0);
  } else {
    __ leap(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                             (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ movp(StackSpaceOperand(2), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ leap(argc.reg(), Operand(argc.reg(), times_pointer_size,
                                (FCA::kArgsLength + 1) * kPointerSize));
    __ movp(StackSpaceOperand(3), argc.reg());
  }
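
  // In the register-argc case the is_construct_call_ slot doubles as the
  // byte count to pop on return: (argc + FCA::kArgsLength + 1) * 8, i.e. the
  // JS arguments, the FunctionCallbackArguments block, and the receiver.
  // CallApiFunctionAndReturn loads it into rbx via stack_space_operand.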

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg,
  // but not if it == arguments_arg.
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand is_construct_call_operand = StackSpaceOperand(3);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      return_first_arg ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  int stack_space = 0;
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}


void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(rax), false,
                            call_data_undefined);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                  : return address
  //  -- rsp[8]                  : name
  //  -- rsp[16 - kArgsLength*8] : PropertyCallbackArguments object
  //  -- ...
  //  -- r8                      : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::Arguments::values_ and handler for name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ leap(name_arg, Operand(rsp, kPCOnStackSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  __ leap(scratch, Operand(name_arg, 1 * kPointerSize));

  // v8::PropertyAccessorInfo::args_.
  __ movp(StackSpaceOperand(0), scratch);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg,
  // but not if it == accessor_info_arg or name_arg.
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // The name handler is counted as an argument.
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackSpace, nullptr, return_value_operand, NULL);
}
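
// For reference (a sketch of the public API side, not part of this file):
// the getter callbacks dispatched through this stub have the shape
//   void Getter(v8::Local<v8::Name> name,
//               const v8::PropertyCallbackInfo<v8::Value>& info);
// with the PropertyCallbackArguments block built above backing `info`.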

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_X64