// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/isolate.h"
#include "src/jsregexp.h"
#include "src/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
static void InitializeArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}

static void InitializeInternalArrayConstructorDescriptor(
    Isolate* isolate, CodeStubDescriptor* descriptor,
    int constant_stack_parameter_count) {
  Address deopt_handler = Runtime::FunctionForId(
      Runtime::kInternalArrayConstructor)->entry;

  if (constant_stack_parameter_count == 0) {
    descriptor->Initialize(deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE);
  } else {
    descriptor->Initialize(rax, deopt_handler, constant_stack_parameter_count,
                           JS_FUNCTION_STUB_MODE, PASS_ARGUMENTS);
  }
}

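// Note on the two helpers above: a constant_stack_parameter_count of 0 means
// the stub takes no stack arguments, so no register needs to carry an
// argument count; any other value registers rax as the argument-count
// register and leaves the arguments themselves on the stack (PASS_ARGUMENTS).
// The *NArgumentsConstructorStub initializers below pass -1 for "count not
// known at compile time".
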
void ArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void ArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void ArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeArrayConstructorDescriptor(isolate(), descriptor, -1);
}


void InternalArrayNoArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 0);
}


void InternalArraySingleArgumentConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, 1);
}


void InternalArrayNArgumentsConstructorStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  InitializeInternalArrayConstructorDescriptor(isolate(), descriptor, -1);
}

#define __ ACCESS_MASM(masm)

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetEnvironmentParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           rax.is(descriptor.GetEnvironmentParameterRegister(param_count - 1)));
    // Push arguments.
    for (int i = 0; i < param_count; ++i) {
      __ Push(descriptor.GetEnvironmentParameterRegister(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ Ret();
}

void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  __ PushCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  const int argument_count = 1;
  __ PrepareCallCFunction(argument_count);
  __ LoadAddress(arg_reg_1,
                 ExternalReference::isolate_address(isolate()));

  AllowExternalCallThatCantCauseGC scope(masm);
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  __ PopCallerSaved(save_doubles() ? kSaveFPRegs : kDontSaveFPRegs);
  __ ret(0);
}

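// Note on the stub above: it can be entered from code that keeps live values
// in caller-saved registers, so it saves and restores all of them (and, when
// save_doubles() is set, the XMM registers as well) around the C call.
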
class FloatingPointHelper : public AllStatic {
 public:
  enum ConvertUndefined {
    CONVERT_UNDEFINED_TO_ZERO,
    BAILOUT_ON_UNDEFINED
  };
  // Load the operands from rdx and rax into xmm0 and xmm1, as doubles.
  // If the operands are not both numbers, jump to not_numbers.
  // Leaves rdx and rax unchanged. SmiOperands assumes both are smis.
  // NumberOperands assumes both are smis or heap numbers.
  static void LoadSSE2UnknownOperands(MacroAssembler* masm,
                                      Label* not_numbers);
};

void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

  Label check_negative, process_64_bits, done;

  int double_offset = offset();

  // Account for return address and saved regs if input is rsp.
  if (input_reg.is(rsp)) double_offset += 3 * kRegisterSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

  Register scratch1;
  Register scratch_candidates[3] = { rbx, rdx, rdi };
  for (int i = 0; i < 3; i++) {
    scratch1 = scratch_candidates[i];
    if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
  }

  // Since we must use rcx for shifts below, use some other register (rax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(rcx) ? rax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg.is(rcx) ? rax : rcx;
  __ pushq(scratch1);
  __ pushq(save_reg);

  bool stash_exponent_copy = !input_reg.is(rsp);
  __ movl(scratch1, mantissa_operand);
  __ movsd(xmm0, mantissa_operand);
  __ movl(rcx, exponent_operand);
  if (stash_exponent_copy) __ pushq(rcx);
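
  // The truncation below proceeds along one of two paths, selected on the
  // unbiased exponent e of the input:
  //  - e < 52 (kMantissaBits): the value fits in a signed 64-bit integer,
  //    so a single cvttsd2siq suffices.
  //  - e >= 52: every significand bit is integral, and the low 32 bits of
  //    the result are the low mantissa word shifted left by e - 52 (a shift
  //    amount above 31 leaves 0, as the result is taken modulo 2^32).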
  __ andl(rcx, Immediate(HeapNumber::kExponentMask));
  __ shrl(rcx, Immediate(HeapNumber::kExponentShift));
  __ leal(result_reg, MemOperand(rcx, -HeapNumber::kExponentBias));
  __ cmpl(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa.
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ subl(rcx, Immediate(delta));
  __ xorl(result_reg, result_reg);
  __ cmpl(rcx, Immediate(31));
  __ j(above, &done);
  __ shll_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  __ cvttsd2siq(result_reg, xmm0);
  __ jmp(&done, Label::kNear);

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ movl(result_reg, scratch1);
  __ negl(result_reg);
  if (stash_exponent_copy) {
    __ cmpl(MemOperand(rsp, 0), Immediate(0));
  } else {
    __ cmpl(exponent_operand, Immediate(0));
  }
  __ cmovl(greater, result_reg, scratch1);

  // Restore registers.
  __ bind(&done);
  if (stash_exponent_copy) {
    __ addp(rsp, Immediate(kDoubleSize));
  }
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(rcx));
    __ movl(final_result_reg, result_reg);
  }
  __ popq(save_reg);
  __ popq(scratch1);
  __ ret(0);
}

void FloatingPointHelper::LoadSSE2UnknownOperands(MacroAssembler* masm,
                                                  Label* not_numbers) {
  Label load_smi_rdx, load_nonsmi_rax, load_smi_rax, load_float_rax, done;
  // Load operand in rdx into xmm0, or branch to not_numbers.
  __ LoadRoot(rcx, Heap::kHeapNumberMapRootIndex);
  __ JumpIfSmi(rdx, &load_smi_rdx);
  __ cmpp(FieldOperand(rdx, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);  // Argument in rdx is not a number.
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  // Load operand in rax into xmm1, or branch to not_numbers.
  __ JumpIfSmi(rax, &load_smi_rax);

  __ bind(&load_nonsmi_rax);
  __ cmpp(FieldOperand(rax, HeapObject::kMapOffset), rcx);
  __ j(not_equal, not_numbers);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&done);

  __ bind(&load_smi_rdx);
  __ SmiToInteger32(kScratchRegister, rdx);
  __ Cvtlsi2sd(xmm0, kScratchRegister);
  __ JumpIfNotSmi(rax, &load_nonsmi_rax);

  __ bind(&load_smi_rax);
  __ SmiToInteger32(kScratchRegister, rax);
  __ Cvtlsi2sd(xmm1, kScratchRegister);
  __ bind(&done);
}

void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent.is(rdx));
  const Register base = rax;
  const Register scratch = rcx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ movp(scratch, Immediate(1));
  __ Cvtlsi2sd(double_result, scratch);

  if (exponent_type() == ON_STACK) {
    Label base_is_smi, unpack_exponent;
    // The exponent and base are supplied as arguments on the stack.
    // This can only happen if the stub is called from non-optimized code.
    // Load input parameters from stack.
    StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(base, args.GetArgumentOperand(0));
    __ movp(exponent, args.GetArgumentOperand(1));
    __ JumpIfSmi(base, &base_is_smi, Label::kNear);
    __ CompareRoot(FieldOperand(base, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);

    __ movsd(double_base, FieldOperand(base, HeapNumber::kValueOffset));
    __ jmp(&unpack_exponent, Label::kNear);

    __ bind(&base_is_smi);
    __ SmiToInteger32(base, base);
    __ Cvtlsi2sd(double_base, base);
    __ bind(&unpack_exponent);

    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ CompareRoot(FieldOperand(exponent, HeapObject::kMapOffset),
                   Heap::kHeapNumberMapRootIndex);
    __ j(not_equal, &call_runtime);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  } else if (exponent_type() == TAGGED) {
    __ JumpIfNotSmi(exponent, &exponent_not_smi, Label::kNear);
    __ SmiToInteger32(exponent, exponent);
    __ jmp(&int_exponent);

    __ bind(&exponent_not_smi);
    __ movsd(double_exponent, FieldOperand(exponent, HeapNumber::kValueOffset));
  }

  if (exponent_type() != INTEGER) {
    Label fast_power, try_arithmetic_simplification;
    // Detect integer exponents stored as double.
    __ DoubleToI(exponent, double_exponent, double_scratch,
                 TREAT_MINUS_ZERO_AS_ZERO, &try_arithmetic_simplification,
                 &try_arithmetic_simplification,
                 &try_arithmetic_simplification);
    __ jmp(&int_exponent);

    __ bind(&try_arithmetic_simplification);
    __ cvttsd2si(exponent, double_exponent);
    // Skip to runtime if possibly NaN (indicated by the indefinite integer).
    __ cmpl(exponent, Immediate(0x1));
    __ j(overflow, &call_runtime);
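    // (cvttsd2si produces the "indefinite integer" 0x80000000 == INT32_MIN
    // for NaN and out-of-range inputs, and INT32_MIN is the only value for
    // which cmpl against 1 sets the overflow flag, so the branch above
    // singles out exactly that case.)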

    if (exponent_type() == ON_STACK) {
      // Detect square root case. Crankshaft detects constant +/-0.5 at
      // compile time and uses DoMathPowHalf instead. We then skip this check
      // for non-constant cases of +/-0.5 as these hardly occur.
      Label continue_sqrt, continue_rsqrt, not_plus_half;

      // Load double_scratch with 0.5.
      __ movq(scratch, V8_UINT64_C(0x3FE0000000000000));
      __ movq(double_scratch, scratch);
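      // (0x3FE0000000000000 is the IEEE-754 double bit pattern of +0.5:
      // sign 0, biased exponent 0x3FE, i.e. -1, and an all-zero mantissa.)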
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &not_plus_half, Label::kNear);

      // Calculates square root of base. Check for the special case of
      // Math.pow(-Infinity, 0.5) == Infinity (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_sqrt, Label::kNear);
      __ j(carry, &continue_sqrt, Label::kNear);

      // Set result to Infinity in the special case.
      __ xorps(double_result, double_result);
      __ subsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&continue_sqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_scratch, double_scratch);
      __ addsd(double_scratch, double_base);  // Convert -0 to 0.
      __ sqrtsd(double_result, double_scratch);
      __ jmp(&done);

      __ bind(&not_plus_half);
      // Load double_scratch with -0.5 by subtracting 1.
      __ subsd(double_scratch, double_result);
      // Already ruled out NaNs for exponent.
      __ ucomisd(double_scratch, double_exponent);
      __ j(not_equal, &fast_power, Label::kNear);

      // Calculates reciprocal of square root of base. Check for the special
      // case of Math.pow(-Infinity, -0.5) == 0 (ECMA spec, 15.8.2.13).
      // According to IEEE-754, double-precision -Infinity has the highest
      // 12 bits set and the lowest 52 bits cleared.
      __ movq(scratch, V8_UINT64_C(0xFFF0000000000000));
      __ movq(double_scratch, scratch);
      __ ucomisd(double_scratch, double_base);
      // Comparing -Infinity with NaN results in "unordered", which sets the
      // zero flag as if both were equal. However, it also sets the carry flag.
      __ j(not_equal, &continue_rsqrt, Label::kNear);
      __ j(carry, &continue_rsqrt, Label::kNear);

      // Set result to 0 in the special case.
      __ xorps(double_result, double_result);
      __ jmp(&done);

      __ bind(&continue_rsqrt);
      // sqrtsd returns -0 when input is -0. ECMA spec requires +0.
      __ xorps(double_exponent, double_exponent);
      __ addsd(double_exponent, double_base);  // Convert -0 to +0.
      __ sqrtsd(double_exponent, double_exponent);
      __ divsd(double_result, double_exponent);
      __ jmp(&done);
    }

    // Using FPU instructions to calculate power.
    Label fast_power_failed;
    __ bind(&fast_power);
    __ fnclex();  // Clear flags to catch exceptions later.
    // Transfer (B)ase and (E)xponent onto the FPU register stack.
    __ subp(rsp, Immediate(kDoubleSize));
    __ movsd(Operand(rsp, 0), double_exponent);
    __ fld_d(Operand(rsp, 0));  // E
    __ movsd(Operand(rsp, 0), double_base);
    __ fld_d(Operand(rsp, 0));  // B, E

    // Exponent is in st(1) and base is in st(0)
    // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
    // FYL2X calculates st(1) * log2(st(0))
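    // (F2XM1 below is only defined for arguments in (-1, 1), so X is first
    // split into its integer part rnd(X) and the fraction X - rnd(X); F2XM1
    // handles the fraction and FSCALE reapplies the integer part as a power
    // of two.)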
    __ fyl2x();    // X
    __ fld(0);     // X, X
    __ frndint();  // rnd(X), X
    __ fsub(1);    // rnd(X), X-rnd(X)
    __ fxch(1);    // X - rnd(X), rnd(X)
    // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
    __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
    __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
    __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
    // FSCALE calculates st(0) * 2^st(1)
    __ fscale();  // 2^X, rnd(X)
    __ fstp(1);
    // Bail out to runtime in case of exceptions in the status word.
    __ fnstsw_ax();
    __ testb(rax, Immediate(0x5F));  // Check for all but precision exception.
    __ j(not_zero, &fast_power_failed, Label::kNear);
    __ fstp_d(Operand(rsp, 0));
    __ movsd(double_result, Operand(rsp, 0));
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&done);

    __ bind(&fast_power_failed);
    __ fninit();
    __ addp(rsp, Immediate(kDoubleSize));
    __ jmp(&call_runtime);
  }

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  // Back up exponent as we need to check if exponent is negative later.
  __ movp(scratch, exponent);  // Back up exponent.
  __ movsd(double_scratch, double_base);  // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.
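  // What follows is binary exponentiation (square-and-multiply): each
  // iteration shifts one bit out of the exponent while double_scratch holds
  // base^(2^i), and double_scratch is folded into double_result exactly when
  // the bit shifted out was set. For |exponent| == 5 (0b101), for example,
  // the result is built as base^1 * base^4. The first iteration is peeled
  // off below; a plain movsd replaces the first multiply because
  // double_result still holds 1 at that point.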

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ testl(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ negl(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shrl(scratch, Immediate(1));
  // Above condition means CF==0 && ZF==0. This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shrl(scratch, Immediate(1));
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // If the exponent is negative, return 1/result.
  __ testl(exponent, exponent);
  __ j(greater, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero. Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // input was a smi. We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtlsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  Counters* counters = isolate()->counters();
  if (exponent_type() == ON_STACK) {
    // The arguments are still on the stack.
    __ bind(&call_runtime);
    __ TailCallRuntime(Runtime::kMathPowRT, 2, 1);

    // The stub is called from non-optimized code, which expects the result
    // as heap number in rax.
    __ bind(&done);
    __ AllocateHeapNumber(rax, rcx, &call_runtime);
    __ movsd(FieldOperand(rax, HeapNumber::kValueOffset), double_result);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(2 * kPointerSize);
  } else {
    __ bind(&call_runtime);
    // Move base to the correct argument register. Exponent is already in xmm1.
    __ movsd(xmm0, double_base);
    DCHECK(double_exponent.is(xmm1));
    {
      AllowExternalCallThatCantCauseGC scope(masm);
      __ PrepareCallCFunction(2);
      __ CallCFunction(
          ExternalReference::power_double_double_function(isolate()), 2);
    }
    // Return value is in xmm0.
    __ movsd(double_result, xmm0);

    __ bind(&done);
    __ IncrementCounter(counters->math_pow(), 1);
    __ ret(0);
  }
}

void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // Ensure that the vector and slot registers won't be clobbered before
  // calling the miss handler.
  DCHECK(!FLAG_vector_ics ||
         !AreAliased(r8, r9, VectorLoadICDescriptor::VectorRegister(),
                     VectorLoadICDescriptor::SlotRegister()));

  NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, r8,
                                                          r9, &miss);
  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}

void ArgumentsAccessStub::GenerateReadElement(MacroAssembler* masm) {
  CHECK(!has_new_target());
  // The key is in rdx and the parameter count is in rax.
  DCHECK(rdx.is(ArgumentsAccessReadDescriptor::index()));
  DCHECK(rax.is(ArgumentsAccessReadDescriptor::parameter_count()));

  // Check that the key is a smi.
  Label slow;
  __ JumpIfNotSmi(rdx, &slow);

  // Check if the calling frame is an arguments adaptor frame. We look at the
  // context offset, and if the frame is not a regular one, then we find a
  // Smi instead of the context. We can't use SmiCompare here, because that
  // only works for comparing two smis.
  Label adaptor;
  __ movp(rbx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ Cmp(Operand(rbx, StandardFrameConstants::kContextOffset),
         Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor);

  // Check index against formal parameters count limit passed in
  // through register rax. Use unsigned comparison to get negative
  // check for free.
  __ cmpp(rdx, rax);
  __ j(above_equal, &slow);
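  // (The free negative check works because a negative smi key has its sign
  // bit set: read as an unsigned value it exceeds any valid parameter count,
  // so above_equal catches it along with ordinary out-of-range keys.)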

  // Read the argument from the stack and return it.
  __ SmiSub(rax, rax, rdx);
  __ SmiToInteger32(rax, rax);
  StackArgumentsAccessor args(rbp, rax, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, args.GetArgumentOperand(0));
  __ Ret();

  // Arguments adaptor case: Check index against actual arguments
  // limit found in the arguments adaptor frame. Use unsigned
  // comparison to get negative check for free.
  __ bind(&adaptor);
  __ movp(rcx, Operand(rbx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ cmpp(rdx, rcx);
  __ j(above_equal, &slow);

  // Read the argument from the stack and return it.
  __ SmiSub(rcx, rcx, rdx);
  __ SmiToInteger32(rcx, rcx);
  StackArgumentsAccessor adaptor_args(rbx, rcx,
                                      ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rax, adaptor_args.GetArgumentOperand(0));
  __ Ret();

  // Slow-case: Handle non-smi or out-of-bounds access to arguments
  // by calling the runtime system.
  __ bind(&slow);
  __ PopReturnAddressTo(rbx);
  __ Push(rdx);
  __ PushReturnAddressFrom(rbx);
  __ TailCallRuntime(Runtime::kGetArgumentsProperty, 1, 1);
}

void ArgumentsAccessStub::GenerateNewSloppyFast(MacroAssembler* masm) {
  // Stack layout:
  //  rsp[0]  : return address
  //  rsp[8]  : number of parameters (tagged)
  //  rsp[16] : receiver displacement
  //  rsp[24] : function
  // Registers used over the whole function:
  //  rbx: the mapped parameter count (untagged)
  //  rax: the allocated object (tagged).
  CHECK(!has_new_target());

  Factory* factory = isolate()->factory();

  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ SmiToInteger64(rbx, args.GetArgumentOperand(2));
  // rbx = parameter count (untagged)

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  Label adaptor_frame, try_allocate;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // No adaptor, parameter count = argument count.
  __ movp(rcx, rbx);
  __ jmp(&try_allocate, Label::kNear);

  // We have an adaptor frame. Patch the parameters pointer.
  __ bind(&adaptor_frame);
  __ SmiToInteger64(rcx,
                    Operand(rdx,
                            ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // rbx = parameter count (untagged)
  // rcx = argument count (untagged)
  // Compute the mapped parameter count = min(rbx, rcx) in rbx.
  __ cmpp(rbx, rcx);
  __ j(less_equal, &try_allocate, Label::kNear);
  __ movp(rbx, rcx);

  __ bind(&try_allocate);

  // Compute the sizes of backing store, parameter map, and arguments object.
  // 1. Parameter map, has 2 extra words containing context and backing store.
  const int kParameterMapHeaderSize =
      FixedArray::kHeaderSize + 2 * kPointerSize;
  Label no_parameter_map;
  __ xorp(r8, r8);
  __ testp(rbx, rbx);
  __ j(zero, &no_parameter_map, Label::kNear);
  __ leap(r8, Operand(rbx, times_pointer_size, kParameterMapHeaderSize));
  __ bind(&no_parameter_map);

  // 2. Backing store.
  __ leap(r8, Operand(r8, rcx, times_pointer_size, FixedArray::kHeaderSize));

  // 3. Arguments object.
  __ addp(r8, Immediate(Heap::kSloppyArgumentsObjectSize));

  // Do the allocation of all three objects in one go.
  __ Allocate(r8, rax, rdx, rdi, &runtime, TAG_OBJECT);

  // rax = address of new object(s) (tagged)
  // rcx = argument count (untagged)
  // Get the arguments map from the current native context into rdi.
  Label has_mapped_parameters, instantiate;
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  __ testp(rbx, rbx);
  __ j(not_zero, &has_mapped_parameters, Label::kNear);

  const int kIndex = Context::SLOPPY_ARGUMENTS_MAP_INDEX;
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kIndex)));
  __ jmp(&instantiate, Label::kNear);

  const int kAliasedIndex = Context::ALIASED_ARGUMENTS_MAP_INDEX;
  __ bind(&has_mapped_parameters);
  __ movp(rdi, Operand(rdi, Context::SlotOffset(kAliasedIndex)));
  __ bind(&instantiate);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (untagged)
  // rdi = address of arguments map (tagged)
  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Set up the callee in-object property.
  STATIC_ASSERT(Heap::kArgumentsCalleeIndex == 1);
  __ movp(rdx, args.GetArgumentOperand(0));
  __ AssertNotSmi(rdx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsCalleeIndex * kPointerSize),
          rdx);

  // Use the length (smi tagged) and set that as an in-object property too.
  // Note: rcx is tagged from here on.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // Set up the elements pointer in the allocated arguments object.
  // If we allocated a parameter map, rdi will point there, otherwise to the
  // backing store.
  __ leap(rdi, Operand(rax, Heap::kSloppyArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);

  // rax = address of new object (tagged)
  // rbx = mapped parameter count (untagged)
  // rcx = argument count (tagged)
  // rdi = address of parameter map or backing store (tagged)

  // Initialize parameter map. If there are no mapped arguments, we're done.
  Label skip_parameter_map;
  __ testp(rbx, rbx);
  __ j(zero, &skip_parameter_map);

  __ LoadRoot(kScratchRegister, Heap::kSloppyArgumentsElementsMapRootIndex);
  // rbx contains the untagged mapped parameter count. Add 2 and tag to write.
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);
  __ Integer64PlusConstantToSmi(r9, rbx, 2);
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), r9);
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 0 * kPointerSize), rsi);
  __ leap(r9, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize + 1 * kPointerSize), r9);

  // Copy the parameter slots and the holes in the arguments.
  // We need to fill in mapped_parameter_count slots. They index the context,
  // where parameters are stored in reverse order, at
  //   MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
  // The mapped parameters thus need to get indices
  //   MIN_CONTEXT_SLOTS+parameter_count-1 ..
  //       MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
  // We loop from right to left.
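  // Rough shape of what is being built here, for n mapped parameters (a
  // sketch; the exact slot layout is defined by the runtime, not this stub):
  //   parameter map:  [ map | length n+2 | context | backing store |
  //                     context index slots, filled right to left ]
  //   backing store:  [ map | length | the-hole for each mapped slot |
  //                     actual values for the unmapped arguments ]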
  Label parameters_loop, parameters_test;

  // Load tagged parameter count into r9.
  __ Integer32ToSmi(r9, rbx);
  __ Move(r8, Smi::FromInt(Context::MIN_CONTEXT_SLOTS));
  __ addp(r8, args.GetArgumentOperand(2));
  __ subp(r8, r9);
  __ Move(r11, factory->the_hole_value());
  __ movp(rdx, rdi);
  __ leap(rdi, Operand(rdi, rbx, times_pointer_size, kParameterMapHeaderSize));
  // r9 = loop variable (tagged)
  // r8 = mapping index (tagged)
  // r11 = the hole value
  // rdx = address of parameter map (tagged)
  // rdi = address of backing store (tagged)
  __ jmp(&parameters_test, Label::kNear);

  __ bind(&parameters_loop);
  __ SmiSubConstant(r9, r9, Smi::FromInt(1));
  __ SmiToInteger64(kScratchRegister, r9);
  __ movp(FieldOperand(rdx, kScratchRegister,
                       times_pointer_size,
                       kParameterMapHeaderSize),
          r8);
  __ movp(FieldOperand(rdi, kScratchRegister,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r11);
  __ SmiAddConstant(r8, r8, Smi::FromInt(1));
  __ bind(&parameters_test);
  __ SmiTest(r9);
  __ j(not_zero, &parameters_loop, Label::kNear);

  __ bind(&skip_parameter_map);

  // rcx = argument count (tagged)
  // rdi = address of backing store (tagged)
  // Copy arguments header and remaining slots (if there are any).
  __ Move(FieldOperand(rdi, FixedArray::kMapOffset),
          factory->fixed_array_map());
  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);

  Label arguments_loop, arguments_test;
  __ movp(r8, rbx);
  __ movp(rdx, args.GetArgumentOperand(1));
  // Untag rcx for the loop below.
  __ SmiToInteger64(rcx, rcx);
  __ leap(kScratchRegister, Operand(r8, times_pointer_size, 0));
  __ subp(rdx, kScratchRegister);
  __ jmp(&arguments_test, Label::kNear);

  __ bind(&arguments_loop);
  __ subp(rdx, Immediate(kPointerSize));
  __ movp(r9, Operand(rdx, 0));
  __ movp(FieldOperand(rdi, r8,
                       times_pointer_size,
                       FixedArray::kHeaderSize),
          r9);
  __ addp(r8, Immediate(1));

  __ bind(&arguments_test);
  __ cmpp(r8, rcx);
  __ j(less, &arguments_loop, Label::kNear);

  // Return and remove the on-stack parameters.
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  // rcx = argument count (untagged)
  __ bind(&runtime);
  __ Integer32ToSmi(rcx, rcx);
  __ movp(args.GetArgumentOperand(2), rcx);  // Patch argument count.
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

void ArgumentsAccessStub::GenerateNewSloppySlow(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function
  CHECK(!has_new_target());

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewSloppyArguments, 3, 1);
}

void RestParamAccessStub::GenerateNew(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : index of rest parameter
  // rsp[16] : number of parameters
  // rsp[24] : receiver displacement

  // Check if the calling frame is an arguments adaptor frame.
  Label runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(not_equal, &runtime);

  // Patch the arguments.length and the parameters pointer.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ movp(args.GetArgumentOperand(1), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(0), rdx);

  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewRestParam, 3, 1);
}

void LoadIndexedInterceptorStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label slow;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register key = LoadDescriptor::NameRegister();
  Register scratch = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(key));

  // Check that the key is an array index, that is Uint32.
  STATIC_ASSERT(kSmiValueSize <= 32);
  __ JumpUnlessNonNegativeSmi(key, &slow);

  // Everything is fine, call runtime.
  __ PopReturnAddressTo(scratch);
  __ Push(receiver);  // receiver
  __ Push(key);       // key
  __ PushReturnAddressFrom(scratch);

  // Perform tail call to the entry.
  __ TailCallExternalReference(
      ExternalReference(IC_Utility(IC::kLoadElementWithInterceptor),
                        masm->isolate()),
      2, 1);

  __ bind(&slow);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = rdi;
  Register result = rax;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  DCHECK(!FLAG_vector_ics ||
         (!scratch.is(VectorLoadICDescriptor::VectorRegister()) &&
          result.is(VectorLoadICDescriptor::SlotRegister())));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.
  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          STRING_INDEX_IS_ARRAY_INDEX,
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}

void ArgumentsAccessStub::GenerateNewStrict(MacroAssembler* masm) {
  // rsp[0]  : return address
  // rsp[8]  : number of parameters
  // rsp[16] : receiver displacement
  // rsp[24] : function

  // Check if the calling frame is an arguments adaptor frame.
  Label adaptor_frame, try_allocate, runtime;
  __ movp(rdx, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
  __ movp(rcx, Operand(rdx, StandardFrameConstants::kContextOffset));
  __ Cmp(rcx, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR));
  __ j(equal, &adaptor_frame);

  // Get the length from the frame.
  StackArgumentsAccessor args(rsp, 3, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ SmiToInteger64(rcx, rcx);
  __ jmp(&try_allocate);

  // Patch the arguments.length and the parameters pointer.
  __ bind(&adaptor_frame);
  __ movp(rcx, Operand(rdx, ArgumentsAdaptorFrameConstants::kLengthOffset));

  if (has_new_target()) {
    // Subtract 1 from smi-tagged arguments count.
    __ SmiToInteger32(rcx, rcx);
    __ decl(rcx);
    __ Integer32ToSmi(rcx, rcx);
  }
  __ movp(args.GetArgumentOperand(2), rcx);
  __ SmiToInteger64(rcx, rcx);
  __ leap(rdx, Operand(rdx, rcx, times_pointer_size,
                       StandardFrameConstants::kCallerSPOffset));
  __ movp(args.GetArgumentOperand(1), rdx);

  // Try the new space allocation. Start out with computing the size of
  // the arguments object and the elements array.
  Label add_arguments_object;
  __ bind(&try_allocate);
  __ testp(rcx, rcx);
  __ j(zero, &add_arguments_object, Label::kNear);
  __ leap(rcx, Operand(rcx, times_pointer_size, FixedArray::kHeaderSize));
  __ bind(&add_arguments_object);
  __ addp(rcx, Immediate(Heap::kStrictArgumentsObjectSize));

  // Do the allocation of both objects in one go.
  __ Allocate(rcx, rax, rdx, rbx, &runtime, TAG_OBJECT);

  // Get the arguments map from the current native context.
  __ movp(rdi, Operand(rsi, Context::SlotOffset(Context::GLOBAL_OBJECT_INDEX)));
  __ movp(rdi, FieldOperand(rdi, GlobalObject::kNativeContextOffset));
  const int offset = Context::SlotOffset(Context::STRICT_ARGUMENTS_MAP_INDEX);
  __ movp(rdi, Operand(rdi, offset));

  __ movp(FieldOperand(rax, JSObject::kMapOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kEmptyFixedArrayRootIndex);
  __ movp(FieldOperand(rax, JSObject::kPropertiesOffset), kScratchRegister);
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), kScratchRegister);

  // Get the length (smi tagged) and set that as an in-object property too.
  STATIC_ASSERT(Heap::kArgumentsLengthIndex == 0);
  __ movp(rcx, args.GetArgumentOperand(2));
  __ movp(FieldOperand(rax, JSObject::kHeaderSize +
                       Heap::kArgumentsLengthIndex * kPointerSize),
          rcx);

  // If there are no actual arguments, we're done.
  Label done;
  __ testp(rcx, rcx);
  __ j(zero, &done);

  // Get the parameters pointer from the stack.
  __ movp(rdx, args.GetArgumentOperand(1));

  // Set up the elements pointer in the allocated arguments object and
  // initialize the header in the elements fixed array.
  __ leap(rdi, Operand(rax, Heap::kStrictArgumentsObjectSize));
  __ movp(FieldOperand(rax, JSObject::kElementsOffset), rdi);
  __ LoadRoot(kScratchRegister, Heap::kFixedArrayMapRootIndex);
  __ movp(FieldOperand(rdi, FixedArray::kMapOffset), kScratchRegister);

  __ movp(FieldOperand(rdi, FixedArray::kLengthOffset), rcx);
  // Untag the length for the loop below.
  __ SmiToInteger64(rcx, rcx);

  // Copy the fixed array slots.
  Label loop;
  __ bind(&loop);
  __ movp(rbx, Operand(rdx, -1 * kPointerSize));  // Skip receiver.
  __ movp(FieldOperand(rdi, FixedArray::kHeaderSize), rbx);
  __ addp(rdi, Immediate(kPointerSize));
  __ subp(rdx, Immediate(kPointerSize));
  __ decp(rcx);
  __ j(not_zero, &loop);

  // Return and remove the on-stack parameters.
  __ bind(&done);
  __ ret(3 * kPointerSize);

  // Do the runtime call to allocate the arguments object.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kNewStrictArguments, 3, 1);
}

void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Just jump directly to runtime if native RegExp is not selected at compile
  // time, or if regexp entry in generated code is turned off by a runtime
  // switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : last_match_info (expected JSArray)
  //  rsp[16] : previous index
  //  rsp[24] : subject string
  //  rsp[32] : JSRegExp object

  enum RegExpExecStubArgumentIndices {
    JS_REG_EXP_OBJECT_ARGUMENT_INDEX,
    SUBJECT_STRING_ARGUMENT_INDEX,
    PREVIOUS_INDEX_ARGUMENT_INDEX,
    LAST_MATCH_INFO_ARGUMENT_INDEX,
    REG_EXP_EXEC_ARGUMENT_COUNT
  };

  StackArgumentsAccessor args(rsp, REG_EXP_EXEC_ARGUMENT_COUNT,
                              ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Label runtime;
  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ Load(kScratchRegister, address_of_regexp_stack_memory_size);
  __ testp(kScratchRegister, kScratchRegister);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ JumpIfSmi(rax, &runtime);
  __ CmpObjectType(rax, JS_REGEXP_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ movp(rax, FieldOperand(rax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    Condition is_smi = masm->CheckSmi(rax);
    __ Check(NegateCondition(is_smi),
             kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(rax, FIXED_ARRAY_TYPE, kScratchRegister);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // rax: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ SmiToInteger32(rbx, FieldOperand(rax, JSRegExp::kDataTagOffset));
  __ cmpl(rbx, Immediate(JSRegExp::IRREGEXP));
  __ j(not_equal, &runtime);

  // rax: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ SmiToInteger32(rdx,
                    FieldOperand(rax, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or          number_of_captures <= offsets vector size / 2 - 1
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmpl(rdx, Immediate(Isolate::kJSRegexpStaticOffsetsVectorSize / 2 - 1));
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Set(r14, 0);
  __ movp(rdi, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ JumpIfSmi(rdi, &runtime);
  __ movp(r15, rdi);  // Make a copy of the original subject string.
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  // rax: RegExp data (FixedArray)
  // rdi: subject string
  // r15: subject string
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte? If yes, go to (9).
  // (2) Sequential one byte? If yes, go to (6).
  // (3) Anything but sequential or cons? If yes, go to (7).
  // (4) Cons string. If the string is flat, replace subject with first string.
  //     Otherwise bailout.
  // (5a) Is subject sequential two byte? If yes, go to (9).
  // (5b) Is subject external? If yes, go to (8).
  // (6) One byte sequential. Load regexp code for one byte.
  // (E) Carry on.

  // Deferred code at the end of the stub:
  // (7) Not a long external string? If yes, go to (10).
  // (8) External string. Make it, offset-wise, look like a sequential string.
  // (8a) Is the external string one byte? If yes, go to (6).
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string? If yes, bail out to runtime.
  // (11) Sliced string. Replace subject with parent. Go to (5a).

  Label seq_one_byte_string /* 6 */, seq_two_byte_string /* 9 */,
      external_string /* 8 */, check_underlying /* 5a */,
      not_seq_nor_cons /* 7 */, check_code /* E */,
      not_long_external /* 10 */;

  // (1) Sequential two byte? If yes, go to (9).
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kStringEncodingMask |
                         kShortExternalStringMask));
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte? If yes, go to (6).
  // Any other sequential string must be one byte.
  __ andb(rbx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (6).

  // (3) Anything but sequential or cons? If yes, go to (7).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmpp(rbx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (7).

  // (4) Cons string. Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ CompareRoot(FieldOperand(rdi, ConsString::kSecondOffset),
                 Heap::kempty_stringRootIndex);
  __ j(not_equal, &runtime);
  __ movp(rdi, FieldOperand(rdi, ConsString::kFirstOffset));
  __ bind(&check_underlying);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));

  // (5a) Is subject sequential two byte? If yes, go to (9).
  __ testb(rbx, Immediate(kStringRepresentationMask | kStringEncodingMask));
  STATIC_ASSERT((kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).
  // (5b) Is subject external? If yes, go to (8).
  __ testb(rbx, Immediate(kStringRepresentationMask));
  // The underlying external string is never a short external string.
  STATIC_ASSERT(ExternalString::kMaxShortLength < ConsString::kMinLength);
  STATIC_ASSERT(ExternalString::kMaxShortLength < SlicedString::kMinLength);
  __ j(not_zero, &external_string);  // Go to (8).

  // (6) One byte sequential. Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // rax: RegExp data (FixedArray)
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataOneByteCodeOffset));
  __ Set(rcx, 1);  // Type is one byte.

  // (E) Carry on. String handling is done.
  __ bind(&check_code);
  // r11: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(r11, &runtime);

  // rdi: sequential subject string (or look-alike, external string)
  // r15: original subject string
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);

  // Load used arguments before starting to push arguments for call to native
  // RegExp code to avoid handling changing stack height.
  // We have to use r15 instead of rdi to load the length because rdi might
  // have been only made to look like a sequential string when it actually
  // is an external string.
  __ movp(rbx, args.GetArgumentOperand(PREVIOUS_INDEX_ARGUMENT_INDEX));
  __ JumpIfNotSmi(rbx, &runtime);
  __ SmiCompare(rbx, FieldOperand(r15, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ SmiToInteger64(rbx, rbx);

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);

  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  int argument_slots_on_stack =
      masm->ArgumentStackSlotsForCFunctionCall(kRegExpExecuteArguments);
  __ EnterApiExitFrame(argument_slots_on_stack);

  // Argument 9: Pass current isolate address.
  __ LoadAddress(kScratchRegister,
                 ExternalReference::isolate_address(isolate()));
  __ movq(Operand(rsp, (argument_slots_on_stack - 1) * kRegisterSize),
          kScratchRegister);

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ movq(Operand(rsp, (argument_slots_on_stack - 2) * kRegisterSize),
          Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ Move(kScratchRegister, address_of_regexp_stack_memory_address);
  __ movp(r9, Operand(kScratchRegister, 0));
  __ Move(kScratchRegister, address_of_regexp_stack_memory_size);
  __ addp(r9, Operand(kScratchRegister, 0));
  __ movq(Operand(rsp, (argument_slots_on_stack - 3) * kRegisterSize), r9);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  // Argument 6 is passed in r9 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 4) * kRegisterSize),
          Immediate(0));
#else
  __ Set(r9, 0);
#endif

  // Argument 5: static offsets vector buffer.
  __ LoadAddress(
      r8, ExternalReference::address_of_static_offsets_vector(isolate()));
  // Argument 5 passed in r8 on Linux and on the stack on Windows.
#ifdef _WIN64
  __ movq(Operand(rsp, (argument_slots_on_stack - 5) * kRegisterSize), r8);
#endif

  // rdi: subject string
  // rbx: previous index
  // rcx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // r14: slice offset
  // r15: original subject string

  // Argument 2: Previous index.
  __ movp(arg_reg_2, rbx);

  // Argument 4: End of string data
  // Argument 3: Start of string data
  Label setup_two_byte, setup_rest, got_length, length_not_from_slice;
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ addp(rbx, r14);
  __ SmiToInteger32(arg_reg_3, FieldOperand(r15, String::kLengthOffset));
  __ addp(r14, arg_reg_3);  // Using arg3 as scratch.

  // rbx: start index of the input
  // r14: end index of the input
  // r15: original subject string
  __ testb(rcx, rcx);  // Last use of rcx as encoding of subject string.
  __ j(zero, &setup_two_byte, Label::kNear);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_1, SeqOneByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_1, SeqOneByteString::kHeaderSize));
  __ jmp(&setup_rest, Label::kNear);
  __ bind(&setup_two_byte);
  __ leap(arg_reg_4,
          FieldOperand(rdi, r14, times_2, SeqTwoByteString::kHeaderSize));
  __ leap(arg_reg_3,
          FieldOperand(rdi, rbx, times_2, SeqTwoByteString::kHeaderSize));
  __ bind(&setup_rest);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use rbp, which points exactly to one pointer size below the previous rsp.
  // (Because creating a new stack frame pushes the previous rbp onto the stack
  // and thereby moves up rsp by one kPointerSize.)
  __ movp(arg_reg_1, r15);

  // Locate the code entry and call it.
  __ addp(r11, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(r11);

  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  Label exception;
  __ cmpl(rax, Immediate(1));
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success, Label::kNear);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::EXCEPTION));
  __ j(equal, &exception);
  __ cmpl(rax, Immediate(NativeRegExpMacroAssembler::FAILURE));
  // If none of the above, it can only be retry.
  // Handle that in the runtime system.
  __ j(not_equal, &runtime);

  // For failure return null.
  __ LoadRoot(rax, Heap::kNullValueRootIndex);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ movp(rax, args.GetArgumentOperand(JS_REG_EXP_OBJECT_ARGUMENT_INDEX));
  __ movp(rcx, FieldOperand(rax, JSRegExp::kDataOffset));
  __ SmiToInteger32(rax,
                    FieldOperand(rcx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  __ leal(rdx, Operand(rax, rax, times_1, 2));

  // rdx: Number of capture registers
  // Check that the fourth object is a JSArray object.
  __ movp(r15, args.GetArgumentOperand(LAST_MATCH_INFO_ARGUMENT_INDEX));
  __ JumpIfSmi(r15, &runtime);
  __ CmpObjectType(r15, JS_ARRAY_TYPE, kScratchRegister);
  __ j(not_equal, &runtime);
  // Check that the JSArray is in fast case.
  __ movp(rbx, FieldOperand(r15, JSArray::kElementsOffset));
  __ movp(rax, FieldOperand(rbx, HeapObject::kMapOffset));
  __ CompareRoot(rax, Heap::kFixedArrayMapRootIndex);
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information. Ensure no overflow in add.
  STATIC_ASSERT(FixedArray::kMaxLength < kMaxInt - FixedArray::kLengthOffset);
  __ SmiToInteger32(rax, FieldOperand(rbx, FixedArray::kLengthOffset));
  __ subl(rax, Immediate(RegExpImpl::kLastMatchOverhead));
  __ cmpl(rdx, rax);
  __ j(greater, &runtime);

  // rbx: last_match_info backing store (FixedArray)
  // rdx: number of capture registers
  // Store the capture count.
  __ Integer32ToSmi(kScratchRegister, rdx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastCaptureCountOffset),
          kScratchRegister);
  // Store last subject and last input.
  __ movp(rax, args.GetArgumentOperand(SUBJECT_STRING_ARGUMENT_INDEX));
  __ movp(FieldOperand(rbx, RegExpImpl::kLastSubjectOffset), rax);
  __ movp(rcx, rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastSubjectOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);
  __ movp(rax, rcx);
  __ movp(FieldOperand(rbx, RegExpImpl::kLastInputOffset), rax);
  __ RecordWriteField(rbx,
                      RegExpImpl::kLastInputOffset,
                      rax,
                      rdi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  __ LoadAddress(
      rcx, ExternalReference::address_of_static_offsets_vector(isolate()));

  // rbx: last_match_info backing store (FixedArray)
  // rcx: offsets vector
  // rdx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ subp(rdx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer and make it a smi.
  __ movl(rdi, Operand(rcx, rdx, times_int_size, 0));
  __ Integer32ToSmi(rdi, rdi);
  // Store the smi value in the last match info.
  __ movp(FieldOperand(rbx,
                       rdx,
                       times_pointer_size,
                       RegExpImpl::kFirstCaptureOffset),
          rdi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ movp(rax, r15);
  __ ret(REG_EXP_EXEC_ARGUMENT_COUNT * kPointerSize);

  __ bind(&exception);
  // Result must now be exception. If there is no pending exception already, a
  // stack overflow (on the backtrack stack) was detected in RegExp code, but
  // the exception has not yet been created. Handle that in the runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception_address(
      Isolate::kPendingExceptionAddress, isolate());
  Operand pending_exception_operand =
      masm->ExternalOperand(pending_exception_address, rbx);
  __ movp(rax, pending_exception_operand);
  __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
  __ cmpp(rax, rdx);
  __ j(equal, &runtime);
  __ movp(pending_exception_operand, rdx);

  __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
  Label termination_exception;
  __ j(equal, &termination_exception, Label::kNear);
  __ Throw(rax);

  __ bind(&termination_exception);
  __ ThrowUncatchable(rax);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExecRT, 4, 1);

  // Deferred code for string handling.
  // (7) Not a long external string? If yes, go to (10).
  __ bind(&not_seq_nor_cons);
  // Compare flags are still set from (3).
  __ j(greater, &not_long_external, Label::kNear);  // Go to (10).

  // (8) External string. Short external strings have been ruled out.
  __ bind(&external_string);
  __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
  __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
  if (FLAG_debug_code) {
    // Assert that we do not have a cons or slice (indirect strings) here.
    // Sequential strings have already been ruled out.
    __ testb(rbx, Immediate(kIsIndirectStringMask));
    __ Assert(zero, kExternalStringExpectedButNotFound);
  }
  __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
  // Move the pointer so that offset-wise, it looks like a sequential string.
  STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
  __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
  STATIC_ASSERT(kTwoByteStringTag == 0);
  // (8a) Is the external string one byte? If yes, go to (6).
  __ testb(rbx, Immediate(kStringEncodingMask));
  __ j(not_zero, &seq_one_byte_string);  // Go to (6).

  // rdi: subject string (flat two-byte)
  // rax: RegExp data (FixedArray)
  // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
  __ bind(&seq_two_byte_string);
  __ movp(r11, FieldOperand(rax, JSRegExp::kDataUC16CodeOffset));
  __ Set(rcx, 0);  // Type is two byte.
  __ jmp(&check_code);  // Go to (E).

  // (10) Not a string or a short external string? If yes, bail out to runtime.
  __ bind(&not_long_external);
  // Catch non-string subject or short external string.
  STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag != 0);
  __ testb(rbx, Immediate(kIsNotStringMask | kShortExternalStringMask));
  __ j(not_zero, &runtime);

  // (11) Sliced string. Replace subject with parent. Go to (5a).
  // Load offset into r14 and replace subject string with parent.
  __ SmiToInteger32(r14, FieldOperand(rdi, SlicedString::kOffsetOffset));
  __ movp(rdi, FieldOperand(rdi, SlicedString::kParentOffset));
  __ jmp(&check_underlying);
#endif  // V8_INTERPRETED_REGEXP
}

static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ CompareMap(input, masm->isolate()->factory()->heap_number_map());
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long as
  // hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ movp(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzxbp(scratch,
             FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ testb(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}

void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label check_unequal_objects, done;
  Condition cc = GetCondition();
  Factory* factory = isolate()->factory();

  Label miss;
  CheckInputType(masm, rdx, left(), &miss);
  CheckInputType(masm, rax, right(), &miss);

  // Compare two smis.
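  // (The tagged difference rdx - rax computed below is itself a valid
  // comparison result: as 64-bit values, two smis compare exactly like the
  // integers they encode. If the subtraction overflows, the sign of the
  // result is wrong; NOT (~x == -x - 1) flips the sign bit and cannot yield
  // zero, and only the sign and zero-ness of rax matter to callers.)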
1543 Label non_smi, smi_done;
1544 __ JumpIfNotBothSmi(rax, rdx, &non_smi);
1546 __ j(no_overflow, &smi_done);
1547 __ notp(rdx); // Correct sign in case of overflow. rdx cannot be 0 here.
1553 // The compare stub returns a positive, negative, or zero 64-bit integer
1554 // value in rax, corresponding to the result of comparing the two inputs.
1555 // NOTICE! This code is only reached after a smi-fast-case check, so
1556 // it is certain that at least one operand isn't a smi.
1558 // Two identical objects are equal unless they are both NaN or undefined.
1560 Label not_identical;
1562 __ j(not_equal, &not_identical, Label::kNear);
1565 // Check for undefined. undefined OP undefined is false even though
1566 // undefined == undefined.
1567 Label check_for_nan;
1568 __ CompareRoot(rdx, Heap::kUndefinedValueRootIndex);
1569 __ j(not_equal, &check_for_nan, Label::kNear);
1570 __ Set(rax, NegativeComparisonResult(cc));
1572 __ bind(&check_for_nan);
1575 // Test for NaN. Sadly, we can't just compare to Factory::nan_value(),
1576 // so we do the second best thing - test it ourselves.
1578 // If it's not a heap number, then return equal for the (in)equality operator.
1579 __ Cmp(FieldOperand(rdx, HeapObject::kMapOffset),
1580 factory->heap_number_map());
1581 __ j(equal, &heap_number, Label::kNear);
1583 // Call runtime on identical objects. Otherwise return equal.
1584 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1585 __ j(above_equal, &not_identical, Label::kNear);
1590 __ bind(&heap_number);
1591 // It is a heap number, so return equal if it's not NaN.
1592 // For NaN, return 1 for every condition except greater and
1593 // greater-equal. Return -1 for them, so the comparison yields
1594 // false for all conditions except not-equal.
1596 __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
1597 __ ucomisd(xmm0, xmm0);
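// ucomisd of a value against itself is unordered (parity flag set) exactly
// when the value is NaN.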
1598 __ setcc(parity_even, rax);
1599 // rax is 0 for equal non-NaN heap numbers, 1 for NaNs.
1600 if (cc == greater_equal || cc == greater) {
1605 __ bind(&not_identical);
1608 if (cc == equal) { // Both strict and non-strict.
1609 Label slow; // Fallthrough label.
1611 // If we're doing a strict equality comparison, we don't have to do
1612 // type conversion, so we generate code to do fast comparison for objects
1613 // and oddballs. Non-smi numbers and strings still go through the usual
1616 // If either is a Smi (we know that not both are), then they can only
1617 // be equal if the other is a HeapNumber. If so, use the slow case.
1620 __ SelectNonSmi(rbx, rax, rdx, &not_smis);
1622 // Check if the non-smi operand is a heap number.
1623 __ Cmp(FieldOperand(rbx, HeapObject::kMapOffset),
1624 factory->heap_number_map());
1625 // If heap number, handle it in the slow case.
1627 // Return non-equal. ebx (the lower half of rbx) is not zero.
1634 // If either operand is a JSObject or an oddball value, then they are not
1635 // equal since their pointers are different.
1636 // There is no test for undetectability in strict equality.
1638 // If the first object is a JS object, we have done pointer comparison.
1639 STATIC_ASSERT(LAST_TYPE == LAST_SPEC_OBJECT_TYPE);
1640 Label first_non_object;
1641 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
1642 __ j(below, &first_non_object, Label::kNear);
1643 // Return non-zero (rax is not zero).
1644 Label return_not_equal;
1645 STATIC_ASSERT(kHeapObjectTag != 0);
1646 __ bind(&return_not_equal);
1649 __ bind(&first_non_object);
1650 // Check for oddballs: true, false, null, undefined.
1651 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1652 __ j(equal, &return_not_equal);
1654 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
1655 __ j(above_equal, &return_not_equal);
1657 // Check for oddballs: true, false, null, undefined.
1658 __ CmpInstanceType(rcx, ODDBALL_TYPE);
1659 __ j(equal, &return_not_equal);
1661 // Fall through to the general case.
1666 // Generate the number comparison code.
1667 Label non_number_comparison;
1669 FloatingPointHelper::LoadSSE2UnknownOperands(masm, &non_number_comparison);
1672 __ ucomisd(xmm0, xmm1);
1674 // Don't base result on EFLAGS when a NaN is involved.
1675 __ j(parity_even, &unordered, Label::kNear);
1676 // Return a result of -1, 0, or 1, based on EFLAGS.
1677 __ setcc(above, rax);
1678 __ setcc(below, rcx);
1682 // If one of the numbers was NaN, then the result is always false.
1683 // The cc is never not-equal.
1684 __ bind(&unordered);
1685 DCHECK(cc != not_equal);
1686 if (cc == less || cc == less_equal) {
1693 // The number comparison code did not provide a valid result.
1694 __ bind(&non_number_comparison);
1696 // Fast negative check for internalized-to-internalized equality.
1697 Label check_for_strings;
1699 BranchIfNotInternalizedString(
1700 masm, &check_for_strings, rax, kScratchRegister);
1701 BranchIfNotInternalizedString(
1702 masm, &check_for_strings, rdx, kScratchRegister);
1704 // We've already checked for object identity, so if both operands are
1705 // internalized strings they aren't equal. Register rax already
1706 // holds a non-zero value, which indicates not equal, so just return.
1710 __ bind(&check_for_strings);
1712 __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx,
1713 &check_unequal_objects);
1715 // Inline comparison of one-byte strings.
1717 StringHelper::GenerateFlatOneByteStringEquals(masm, rdx, rax, rcx, rbx);
1719 StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx,
1724 __ Abort(kUnexpectedFallThroughFromStringComparison);
1727 __ bind(&check_unequal_objects);
1728 if (cc == equal && !strict()) {
1729 // Not strict equality. Objects are unequal if
1730 // they are both JSObjects and not undetectable,
1731 // and their pointers are different.
1732 Label not_both_objects, return_unequal;
1733 // At most one is a smi, so we can test for smi by adding the two.
1734 // A smi plus a heap object has the low bit set, a heap object plus
1735 // a heap object has the low bit clear.
1736 STATIC_ASSERT(kSmiTag == 0);
1737 STATIC_ASSERT(kSmiTagMask == 1);
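// Worked example on the low bits: smi(0) + object(1) = 1 -> not both
// objects; object(1) + object(1) = 0 (mod 2) -> both are heap objects.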
1738 __ leap(rcx, Operand(rax, rdx, times_1, 0));
1739 __ testb(rcx, Immediate(kSmiTagMask));
1740 __ j(not_zero, &not_both_objects, Label::kNear);
1741 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rbx);
1742 __ j(below, &not_both_objects, Label::kNear);
1743 __ CmpObjectType(rdx, FIRST_SPEC_OBJECT_TYPE, rcx);
1744 __ j(below, &not_both_objects, Label::kNear);
1745 __ testb(FieldOperand(rbx, Map::kBitFieldOffset),
1746 Immediate(1 << Map::kIsUndetectable));
1747 __ j(zero, &return_unequal, Label::kNear);
1748 __ testb(FieldOperand(rcx, Map::kBitFieldOffset),
1749 Immediate(1 << Map::kIsUndetectable));
1750 __ j(zero, &return_unequal, Label::kNear);
1751 // The objects are both undetectable, so they both compare as the value
1752 // undefined, and are equal.
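// (The canonical undetectable object is document.all, which must compare
// equal to undefined.)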
1754 __ bind(&return_unequal);
1755 // Return non-equal by returning the non-zero object pointer in rax,
1756 // or return equal if we fell through to here.
1758 __ bind(&not_both_objects);
1761 // Push arguments below the return address to prepare jump to builtin.
1762 __ PopReturnAddressTo(rcx);
1766 // Figure out which native to call and setup the arguments.
1767 Builtins::JavaScript builtin;
1769 builtin = strict() ? Builtins::STRICT_EQUALS : Builtins::EQUALS;
1771 builtin = Builtins::COMPARE;
1772 __ Push(Smi::FromInt(NegativeComparisonResult(cc)));
1775 __ PushReturnAddressFrom(rcx);
1777 // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
1778 // tagged as a small integer.
1779 __ InvokeBuiltin(builtin, JUMP_FUNCTION);
1786 static void GenerateRecordCallTarget(MacroAssembler* masm) {
1787 // Cache the called function in a feedback vector slot. Cache states
1788 // are uninitialized, monomorphic (indicated by a JSFunction), and megamorphic.
1790 // rax : number of arguments to the construct function
1791 // rbx : feedback vector
1792 // rdx : slot in feedback vector (Smi)
1793 // rdi : the function to call
1794 Isolate* isolate = masm->isolate();
1795 Label initialize, done, miss, megamorphic, not_array_function,
1796 done_no_smi_convert;
1798 // Load the cache state into rcx.
1799 __ SmiToInteger32(rdx, rdx);
1800 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
1801 FixedArray::kHeaderSize));
1803 // A monomorphic cache hit or an already megamorphic state: invoke the
1804 // function without changing the state.
1807 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
1810 if (!FLAG_pretenuring_call_new) {
1811 // If we came here, we need to see if we are the array function.
1812 // If we didn't have a matching function, and we didn't find the megamorphic
1813 // sentinel, then we have in the slot either some other function or an
1814 // AllocationSite. Do a map check on the object in rcx.
1815 Handle<Map> allocation_site_map =
1816 masm->isolate()->factory()->allocation_site_map();
1817 __ Cmp(FieldOperand(rcx, 0), allocation_site_map);
1818 __ j(not_equal, &miss);
1820 // Make sure the function is the Array() function
1821 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
1823 __ j(not_equal, &megamorphic);
1829 // A monomorphic miss (i.e., here the cache is not uninitialized) goes megamorphic.
1831 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
1832 __ j(equal, &initialize);
1833 // MegamorphicSentinel is an immortal immovable object (undefined) so no
1834 // write-barrier is needed.
1835 __ bind(&megamorphic);
1836 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1837 TypeFeedbackVector::MegamorphicSentinel(isolate));
1840 // An uninitialized cache is patched with the function or sentinel to
1841 // indicate the ElementsKind if function is the Array constructor.
1842 __ bind(&initialize);
1844 if (!FLAG_pretenuring_call_new) {
1845 // Make sure the function is the Array() function
1846 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
1848 __ j(not_equal, &not_array_function);
1851 FrameScope scope(masm, StackFrame::INTERNAL);
1853 // Arguments register must be smi-tagged to call out.
1854 __ Integer32ToSmi(rax, rax);
1857 __ Integer32ToSmi(rdx, rdx);
1861 CreateAllocationSiteStub create_stub(isolate);
1862 __ CallStub(&create_stub);
1868 __ SmiToInteger32(rax, rax);
1870 __ jmp(&done_no_smi_convert);
1872 __ bind(&not_array_function);
1875 __ movp(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
1878 // We won't need rdx or rbx anymore, just save rdi
1882 __ RecordWriteArray(rbx, rdi, rdx, kDontSaveFPRegs,
1883 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
1889 __ Integer32ToSmi(rdx, rdx);
1891 __ bind(&done_no_smi_convert);
1895 static void EmitContinueIfStrictOrNative(MacroAssembler* masm, Label* cont) {
1896 // Do not transform the receiver for strict mode functions.
1897 __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
1898 __ testb(FieldOperand(rcx, SharedFunctionInfo::kStrictModeByteOffset),
1899 Immediate(1 << SharedFunctionInfo::kStrictModeBitWithinByte));
1900 __ j(not_equal, cont);
1902 // Do not transform the receiver for natives.
1903 // SharedFunctionInfo is already loaded into rcx.
1904 __ testb(FieldOperand(rcx, SharedFunctionInfo::kNativeByteOffset),
1905 Immediate(1 << SharedFunctionInfo::kNativeBitWithinByte));
1906 __ j(not_equal, cont);
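// Sloppy-mode, non-native functions fall through here; a primitive
// receiver is later wrapped via ToObject (see EmitWrapCase).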
1910 static void EmitSlowCase(Isolate* isolate,
1911 MacroAssembler* masm,
1912 StackArgumentsAccessor* args,
1914 Label* non_function) {
1915 // Check for function proxy.
1916 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
1917 __ j(not_equal, non_function);
1918 __ PopReturnAddressTo(rcx);
1919 __ Push(rdi); // put proxy as additional argument under return address
1920 __ PushReturnAddressFrom(rcx);
1921 __ Set(rax, argc + 1);
1923 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY);
1925 Handle<Code> adaptor =
1926 masm->isolate()->builtins()->ArgumentsAdaptorTrampoline();
1927 __ jmp(adaptor, RelocInfo::CODE_TARGET);
1930 // CALL_NON_FUNCTION expects the non-function callee as receiver (instead
1931 // of the original receiver from the call site).
1932 __ bind(non_function);
1933 __ movp(args->GetReceiverOperand(), rdi);
1936 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION);
1937 Handle<Code> adaptor =
1938 isolate->builtins()->ArgumentsAdaptorTrampoline();
1939 __ Jump(adaptor, RelocInfo::CODE_TARGET);
1943 static void EmitWrapCase(MacroAssembler* masm,
1944 StackArgumentsAccessor* args,
1946 // Wrap the receiver and patch it back onto the stack.
1947 { FrameScope frame_scope(masm, StackFrame::INTERNAL);
1950 __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
1953 __ movp(args->GetReceiverOperand(), rax);
1958 static void CallFunctionNoFeedback(MacroAssembler* masm,
1959 int argc, bool needs_checks,
1960 bool call_as_method) {
1961 // rdi : the function to call
1963 // call_as_method can only be true if we are compiling a monomorphic method.
1964 Isolate* isolate = masm->isolate();
1965 Label slow, non_function, wrap, cont;
1966 StackArgumentsAccessor args(rsp, argc);
1969 // Check that the function really is a JavaScript function.
1970 __ JumpIfSmi(rdi, &non_function);
1972 // Goto slow case if we do not have a function.
1973 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
1974 __ j(not_equal, &slow);
1977 // Fast-case: Just invoke the function.
1978 ParameterCount actual(argc);
1980 if (call_as_method) {
1982 EmitContinueIfStrictOrNative(masm, &cont);
1985 // Load the receiver from the stack.
1986 __ movp(rax, args.GetReceiverOperand());
1989 __ JumpIfSmi(rax, &wrap);
1991 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2000 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2003 // Slow-case: Non-function called.
2005 EmitSlowCase(isolate, masm, &args, argc, &non_function);
2008 if (call_as_method) {
2010 EmitWrapCase(masm, &args, &cont);
2015 void CallFunctionStub::Generate(MacroAssembler* masm) {
2016 CallFunctionNoFeedback(masm, argc(), NeedsChecks(), CallAsMethod());
2020 void CallConstructStub::Generate(MacroAssembler* masm) {
2021 // rax : number of arguments
2022 // rbx : feedback vector
2023 // rdx : (only if rbx is not the megamorphic symbol) slot in feedback
2025 // rdi : constructor function
2026 Label slow, non_function_call;
2028 // Check that function is not a smi.
2029 __ JumpIfSmi(rdi, &non_function_call);
2030 // Check that function is a JSFunction.
2031 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2032 __ j(not_equal, &slow);
2034 if (RecordCallTarget()) {
2035 GenerateRecordCallTarget(masm);
2037 __ SmiToInteger32(rdx, rdx);
2038 if (FLAG_pretenuring_call_new) {
2039 // Put the AllocationSite from the feedback vector into rbx.
2040 // By adding kPointerSize we encode that we know the AllocationSite
2041 // entry is at the feedback vector slot given by rdx + 1.
2042 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2043 FixedArray::kHeaderSize + kPointerSize));
2045 Label feedback_register_initialized;
2046 // Put the AllocationSite from the feedback vector into rbx, or undefined.
2047 __ movp(rbx, FieldOperand(rbx, rdx, times_pointer_size,
2048 FixedArray::kHeaderSize));
2049 __ CompareRoot(FieldOperand(rbx, 0), Heap::kAllocationSiteMapRootIndex);
2050 __ j(equal, &feedback_register_initialized);
2051 __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
2052 __ bind(&feedback_register_initialized);
2055 __ AssertUndefinedOrAllocationSite(rbx);
2058 // Pass original constructor to construct stub.
2059 if (IsSuperConstructorCall()) {
2060 __ movp(rdx, Operand(rsp, rax, times_pointer_size, 2 * kPointerSize));
2065 // Jump to the function-specific construct stub.
2066 Register jmp_reg = rcx;
2067 __ movp(jmp_reg, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
2068 __ movp(jmp_reg, FieldOperand(jmp_reg,
2069 SharedFunctionInfo::kConstructStubOffset));
2070 __ leap(jmp_reg, FieldOperand(jmp_reg, Code::kHeaderSize));
2073 // rdi: called object
2074 // rax: number of arguments
2078 __ CmpInstanceType(rcx, JS_FUNCTION_PROXY_TYPE);
2079 __ j(not_equal, &non_function_call);
2080 __ GetBuiltinEntry(rdx, Builtins::CALL_FUNCTION_PROXY_AS_CONSTRUCTOR);
2083 __ bind(&non_function_call);
2084 __ GetBuiltinEntry(rdx, Builtins::CALL_NON_FUNCTION_AS_CONSTRUCTOR);
2086 // Set expected number of arguments to zero (not changing rax).
2088 __ Jump(isolate()->builtins()->ArgumentsAdaptorTrampoline(),
2089 RelocInfo::CODE_TARGET);
2093 static void EmitLoadTypeFeedbackVector(MacroAssembler* masm, Register vector) {
2094 __ movp(vector, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
2095 __ movp(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
2096 __ movp(vector, FieldOperand(vector,
2097 SharedFunctionInfo::kFeedbackVectorOffset));
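// The vector register now holds the TypeFeedbackVector, reached via
// frame function -> SharedFunctionInfo -> feedback vector.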
2101 void CallIC_ArrayStub::Generate(MacroAssembler* masm) {
2103 // rdx - slot id (as integer)
2106 int argc = arg_count();
2107 ParameterCount actual(argc);
2109 __ SmiToInteger32(rdx, rdx);
2111 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2113 __ j(not_equal, &miss);
2115 __ movp(rax, Immediate(arg_count()));
2116 __ movp(rcx, FieldOperand(rbx, rdx, times_pointer_size,
2117 FixedArray::kHeaderSize));
2118 // Verify that rcx contains an AllocationSite
2119 Factory* factory = masm->isolate()->factory();
2120 __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
2121 factory->allocation_site_map());
2122 __ j(not_equal, &miss);
2126 ArrayConstructorStub stub(masm->isolate(), arg_count());
2127 __ TailCallStub(&stub);
2132 // The slow case: we need this no matter what to complete a call after a miss.
2133 CallFunctionNoFeedback(masm,
2143 void CallICStub::Generate(MacroAssembler* masm) {
2147 Isolate* isolate = masm->isolate();
2148 const int with_types_offset =
2149 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kWithTypesIndex);
2150 const int generic_offset =
2151 FixedArray::OffsetOfElementAt(TypeFeedbackVector::kGenericCountIndex);
2152 Label extra_checks_or_miss, slow_start;
2153 Label slow, non_function, wrap, cont;
2154 Label have_js_function;
2155 int argc = arg_count();
2156 StackArgumentsAccessor args(rsp, argc);
2157 ParameterCount actual(argc);
2159 // The checks. First, does rdi match the recorded monomorphic target?
2160 __ SmiToInteger32(rdx, rdx);
2162 FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize));
2164 // We don't know that we have a weak cell. We might have a private symbol
2165 // or an AllocationSite, but the memory is safe to examine.
2166 // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
2168 // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
2169 // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
2170 // computed, meaning that it can't appear to be a pointer. If the low bit is
2171 // 0, then hash is computed, but the 0 bit prevents the field from appearing to be a pointer.
2173 STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
2174 STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
2175 WeakCell::kValueOffset &&
2176 WeakCell::kValueOffset == Symbol::kHashFieldSlot);
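// Thanks to the asserts above, all three candidate objects alias the same
// slot, so the single cmpp below is safe regardless of what rcx holds.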
2178 __ cmpp(rdi, FieldOperand(rcx, WeakCell::kValueOffset));
2179 __ j(not_equal, &extra_checks_or_miss);
2181 // The compare above could have been a SMI/SMI comparison. Guard against this
2182 // convincing us that we have a monomorphic JSFunction.
2183 __ JumpIfSmi(rdi, &extra_checks_or_miss);
2185 __ bind(&have_js_function);
2186 if (CallAsMethod()) {
2187 EmitContinueIfStrictOrNative(masm, &cont);
2189 // Load the receiver from the stack.
2190 __ movp(rax, args.GetReceiverOperand());
2192 __ JumpIfSmi(rax, &wrap);
2194 __ CmpObjectType(rax, FIRST_SPEC_OBJECT_TYPE, rcx);
2200 __ InvokeFunction(rdi, actual, JUMP_FUNCTION, NullCallWrapper());
2203 EmitSlowCase(isolate, masm, &args, argc, &non_function);
2205 if (CallAsMethod()) {
2207 EmitWrapCase(masm, &args, &cont);
2210 __ bind(&extra_checks_or_miss);
2211 Label uninitialized, miss;
2213 __ Cmp(rcx, TypeFeedbackVector::MegamorphicSentinel(isolate));
2214 __ j(equal, &slow_start);
2216 // The following cases attempt to handle MISS cases without going to the
2218 if (FLAG_trace_ic) {
2222 __ Cmp(rcx, TypeFeedbackVector::UninitializedSentinel(isolate));
2223 __ j(equal, &uninitialized);
2225 // We are going megamorphic. If the feedback is a JSFunction, it is fine
2226 // to handle it here. More complex cases are dealt with in the runtime.
2227 __ AssertNotSmi(rcx);
2228 __ CmpObjectType(rcx, JS_FUNCTION_TYPE, rcx);
2229 __ j(not_equal, &miss);
2230 __ Move(FieldOperand(rbx, rdx, times_pointer_size, FixedArray::kHeaderSize),
2231 TypeFeedbackVector::MegamorphicSentinel(isolate));
2232 // We have to update statistics for runtime profiling.
2233 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(-1));
2234 __ SmiAddConstant(FieldOperand(rbx, generic_offset), Smi::FromInt(1));
2235 __ jmp(&slow_start);
2237 __ bind(&uninitialized);
2239 // We are going monomorphic, provided we actually have a JSFunction.
2240 __ JumpIfSmi(rdi, &miss);
2242 // Goto miss case if we do not have a function.
2243 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2244 __ j(not_equal, &miss);
2246 // Make sure the function is not the Array() function, which requires special
2247 // behavior on MISS.
2248 __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, rcx);
2253 __ SmiAddConstant(FieldOperand(rbx, with_types_offset), Smi::FromInt(1));
2255 // Store the function. Use a stub since we need a frame for allocation.
2257 // rdx - slot (needs to be in smi form)
2260 FrameScope scope(masm, StackFrame::INTERNAL);
2261 CreateWeakCellStub create_stub(isolate);
2263 __ Integer32ToSmi(rdx, rdx);
2265 __ CallStub(&create_stub);
2269 __ jmp(&have_js_function);
2271 // We are here because tracing is on or we encountered a MISS case we can't handle here.
2277 __ bind(&slow_start);
2278 // Check that function is not a smi.
2279 __ JumpIfSmi(rdi, &non_function);
2280 // Check that function is a JSFunction.
2281 __ CmpObjectType(rdi, JS_FUNCTION_TYPE, rcx);
2282 __ j(not_equal, &slow);
2283 __ jmp(&have_js_function);
2290 void CallICStub::GenerateMiss(MacroAssembler* masm) {
2291 FrameScope scope(masm, StackFrame::INTERNAL);
2293 // Push the receiver and the function and feedback info.
2296 __ Integer32ToSmi(rdx, rdx);
2300 IC::UtilityId id = GetICState() == DEFAULT ? IC::kCallIC_Miss
2301 : IC::kCallIC_Customization_Miss;
2303 ExternalReference miss = ExternalReference(IC_Utility(id), masm->isolate());
2304 __ CallExternalReference(miss, 3);
2306 // Move result to rdi and exit the internal frame.
2311 bool CEntryStub::NeedsImmovableCode() {
2316 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
2317 CEntryStub::GenerateAheadOfTime(isolate);
2318 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
2319 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
2320 // It is important that the store buffer overflow stubs are generated first.
2321 ArrayConstructorStubBase::GenerateStubsAheadOfTime(isolate);
2322 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
2323 CreateWeakCellStub::GenerateAheadOfTime(isolate);
2324 BinaryOpICStub::GenerateAheadOfTime(isolate);
2325 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
2329 void CodeStub::GenerateFPStubs(Isolate* isolate) {
2333 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
2334 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
2336 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
2337 save_doubles.GetCode();
2341 void CEntryStub::Generate(MacroAssembler* masm) {
2342 // rax: number of arguments including receiver
2343 // rbx: pointer to C function (C callee-saved)
2344 // rbp: frame pointer of calling JS frame (restored after C call)
2345 // rsp: stack pointer (restored after C call)
2346 // rsi: current context (restored)
2348 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2350 // Enter the exit frame that transitions from JavaScript to C++.
2352 int arg_stack_space = (result_size() < 2 ? 2 : 4);
2354 int arg_stack_space = 0;
2356 __ EnterExitFrame(arg_stack_space, save_doubles());
2358 // rbx: pointer to builtin function (C callee-saved).
2359 // rbp: frame pointer of exit frame (restored after C call).
2360 // rsp: stack pointer (restored after C call).
2361 // r14: number of arguments including receiver (C callee-saved).
2362 // r15: argv pointer (C callee-saved).
2364 // Simple results returned in rax (both AMD64 and Win64 calling conventions).
2365 // Complex results must be written to address passed as first argument.
2366 // AMD64 calling convention: a struct of two pointers in rax+rdx
2368 // Check stack alignment.
2369 if (FLAG_debug_code) {
2370 __ CheckStackAlignment();
2375 // Windows 64-bit ABI passes arguments in rcx, rdx, r8, r9.
2376 // Pass argv and argc as two parameters. The arguments object will
2377 // be created by stubs declared by DECLARE_RUNTIME_FUNCTION().
2378 if (result_size() < 2) {
2379 // Pass a pointer to the Arguments object as the first argument.
2380 // Return result in single register (rax).
2381 __ movp(rcx, r14); // argc.
2382 __ movp(rdx, r15); // argv.
2383 __ Move(r8, ExternalReference::isolate_address(isolate()));
2385 DCHECK_EQ(2, result_size());
2386 // Pass a pointer to the result location as the first argument.
2387 __ leap(rcx, StackSpaceOperand(2));
2388 // Pass a pointer to the Arguments object as the second argument.
2389 __ movp(rdx, r14); // argc.
2390 __ movp(r8, r15); // argv.
2391 __ Move(r9, ExternalReference::isolate_address(isolate()));
2395 // GCC passes arguments in rdi, rsi, rdx, rcx, r8, r9.
2396 __ movp(rdi, r14); // argc.
2397 __ movp(rsi, r15); // argv.
2398 __ Move(rdx, ExternalReference::isolate_address(isolate()));
2401 // Result is in rax - do not destroy this register!
2404 // If return value is on the stack, pop it to registers.
2405 if (result_size() > 1) {
2406 DCHECK_EQ(2, result_size());
2407 // Read result values stored on stack. Result is stored
2408 // above the four argument mirror slots and the two
2409 // Arguments object slots.
2410 __ movq(rax, Operand(rsp, 6 * kRegisterSize));
2411 __ movq(rdx, Operand(rsp, 7 * kRegisterSize));
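// Slot arithmetic: 4 argument mirror slots + 2 Arguments object slots = 6,
// so the two result words live at slots 6 and 7.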
2415 // Runtime functions should not return 'the hole'. Allowing it to escape may
2416 // lead to crashes in the IC code later.
2417 if (FLAG_debug_code) {
2419 __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
2420 __ j(not_equal, &okay, Label::kNear);
2425 // Check result for exception sentinel.
2426 Label exception_returned;
2427 __ CompareRoot(rax, Heap::kExceptionRootIndex);
2428 __ j(equal, &exception_returned);
2430 ExternalReference pending_exception_address(
2431 Isolate::kPendingExceptionAddress, isolate());
2433 // Check that there is no pending exception, otherwise we
2434 // should have returned the exception sentinel.
2435 if (FLAG_debug_code) {
2437 __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
2438 Operand pending_exception_operand =
2439 masm->ExternalOperand(pending_exception_address);
2440 __ cmpp(r14, pending_exception_operand);
2441 __ j(equal, &okay, Label::kNear);
2446 // Exit the JavaScript to C++ exit frame.
2447 __ LeaveExitFrame(save_doubles());
2450 // Handling of exception.
2451 __ bind(&exception_returned);
2453 // Retrieve the pending exception.
2454 Operand pending_exception_operand =
2455 masm->ExternalOperand(pending_exception_address);
2456 __ movp(rax, pending_exception_operand);
2458 // Clear the pending exception.
2459 __ LoadRoot(rdx, Heap::kTheHoleValueRootIndex);
2460 __ movp(pending_exception_operand, rdx);
2462 // Special handling of termination exceptions which are uncatchable
2463 // by JavaScript code.
2464 Label throw_termination_exception;
2465 __ CompareRoot(rax, Heap::kTerminationExceptionRootIndex);
2466 __ j(equal, &throw_termination_exception);
2468 // Handle normal exception.
2471 __ bind(&throw_termination_exception);
2472 __ ThrowUncatchable(rax);
2476 void JSEntryStub::Generate(MacroAssembler* masm) {
2477 Label invoke, handler_entry, exit;
2478 Label not_outermost_js, not_outermost_js_2;
2480 ProfileEntryHookStub::MaybeCallEntryHook(masm);
2482 { // NOLINT. Scope block confuses linter.
2483 MacroAssembler::NoRootArrayScope uninitialized_root_register(masm);
2488 // Push the stack frame type marker twice.
2489 int marker = type();
2490 // Scratch register is neither callee-save, nor an argument register on any
2491 // platform. It's free to use at this point.
2492 // Cannot use smi-register for loading yet.
2493 __ Move(kScratchRegister, Smi::FromInt(marker), Assembler::RelocInfoNone());
2494 __ Push(kScratchRegister); // context slot
2495 __ Push(kScratchRegister); // function slot
2496 // Save callee-saved registers (X64/X32/Win64 calling conventions).
2502 __ pushq(rdi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2503 __ pushq(rsi); // Only callee save in Win64 ABI, argument in AMD64 ABI.
2508 // On Win64 XMM6-XMM15 are callee-save
2509 __ subp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2510 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0), xmm6);
2511 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1), xmm7);
2512 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2), xmm8);
2513 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3), xmm9);
2514 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4), xmm10);
2515 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5), xmm11);
2516 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6), xmm12);
2517 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7), xmm13);
2518 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8), xmm14);
2519 __ movdqu(Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9), xmm15);
2522 // Set up the roots and smi constant registers.
2523 // Needs to be done before any further smi loads.
2524 __ InitializeSmiConstantRegister();
2525 __ InitializeRootRegister();
2528 // Save copies of the top frame descriptor on the stack.
2529 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
2531 Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2532 __ Push(c_entry_fp_operand);
2535 // If this is the outermost JS call, set js_entry_sp value.
2536 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
2537 __ Load(rax, js_entry_sp);
2539 __ j(not_zero, &not_outermost_js);
2540 __ Push(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2542 __ Store(js_entry_sp, rax);
2545 __ bind(&not_outermost_js);
2546 __ Push(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME));
2549 // Jump to a faked try block that does the invoke, with a faked catch
2550 // block that sets the pending exception.
2552 __ bind(&handler_entry);
2553 handler_offset_ = handler_entry.pos();
2554 // Caught exception: Store result (exception) in the pending exception
2555 // field in the JSEnv and return a failure sentinel.
2556 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
2558 __ Store(pending_exception, rax);
2559 __ LoadRoot(rax, Heap::kExceptionRootIndex);
2562 // Invoke: Link this frame into the handler chain. There's only one
2563 // handler block in this code object, so its index is 0.
2565 __ PushTryHandler(StackHandler::JS_ENTRY, 0);
2567 // Clear any pending exceptions.
2568 __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
2569 __ Store(pending_exception, rax);
2571 // Fake a receiver (NULL).
2572 __ Push(Immediate(0)); // receiver
2574 // Invoke the function by calling through JS entry trampoline builtin and
2575 // pop the faked function when we return. We load the address from an
2576 // external reference instead of inlining the call target address directly
2577 // in the code, because the builtin stubs may not have been generated yet
2578 // at the time this code is generated.
2579 if (type() == StackFrame::ENTRY_CONSTRUCT) {
2580 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
2582 __ Load(rax, construct_entry);
2584 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
2585 __ Load(rax, entry);
2587 __ leap(kScratchRegister, FieldOperand(rax, Code::kHeaderSize));
2588 __ call(kScratchRegister);
2590 // Unlink this frame from the handler chain.
2594 // Check if the current stack frame is marked as the outermost JS frame.
2596 __ Cmp(rbx, Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME));
2597 __ j(not_equal, &not_outermost_js_2);
2598 __ Move(kScratchRegister, js_entry_sp);
2599 __ movp(Operand(kScratchRegister, 0), Immediate(0));
2600 __ bind(&not_outermost_js_2);
2602 // Restore the top frame descriptor from the stack.
2603 { Operand c_entry_fp_operand = masm->ExternalOperand(c_entry_fp);
2604 __ Pop(c_entry_fp_operand);
2607 // Restore callee-saved registers (X64 conventions).
2609 // On Win64 XMM6-XMM15 are callee-save
2610 __ movdqu(xmm6, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 0));
2611 __ movdqu(xmm7, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 1));
2612 __ movdqu(xmm8, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 2));
2613 __ movdqu(xmm9, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 3));
2614 __ movdqu(xmm10, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 4));
2615 __ movdqu(xmm11, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 5));
2616 __ movdqu(xmm12, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 6));
2617 __ movdqu(xmm13, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 7));
2618 __ movdqu(xmm14, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 8));
2619 __ movdqu(xmm15, Operand(rsp, EntryFrameConstants::kXMMRegisterSize * 9));
2620 __ addp(rsp, Immediate(EntryFrameConstants::kXMMRegistersBlockSize));
2625 // Callee-saved in Win64 ABI, arguments/volatile in AMD64 ABI.
2633 __ addp(rsp, Immediate(2 * kPointerSize)); // remove markers
2635 // Restore frame pointer and return.
2641 void InstanceofStub::Generate(MacroAssembler* masm) {
2642 // Implements "value instanceof function" operator.
2643 // Expected input state with no inline cache:
2644 // rsp[0] : return address
2645 // rsp[8] : function pointer
2647 // Expected input state with an inline one-element cache:
2648 // rsp[0] : return address
2649 // rsp[8] : offset from return address to location of inline cache
2650 // rsp[16] : function pointer
2652 // Returns a bitwise zero to indicate that the value
2653 // is an instance of the function and anything else to
2654 // indicate that the value is not an instance.
2656 // Fixed register usage throughout the stub.
2657 Register object = rax; // Object (lhs).
2658 Register map = rbx; // Map of the object.
2659 Register function = rdx; // Function (rhs).
2660 Register prototype = rdi; // Prototype of the function.
2661 Register scratch = rcx;
2663 static const int kOffsetToMapCheckValue = 2;
2664 static const int kOffsetToResultValue = kPointerSize == kInt64Size ? 18 : 14;
2665 // The last 4 bytes of the instruction sequence
2666 // movp(rdi, FieldOperand(rax, HeapObject::kMapOffset))
2667 // Move(kScratchRegister, Factory::the_hole_value())
2668 // in front of the hole value address.
2669 static const unsigned int kWordBeforeMapCheckValue =
2670 kPointerSize == kInt64Size ? 0xBA49FF78 : 0xBA41FF78;
2671 // The last 4 bytes of the instruction sequence
2672 // __ j(not_equal, &cache_miss);
2673 // __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
2674 // before the offset of the hole value in the root array.
2675 static const unsigned int kWordBeforeResultValue =
2676 kPointerSize == kInt64Size ? 0x458B4906 : 0x458B4106;
2678 int extra_argument_offset = HasCallSiteInlineCheck() ? 1 : 0;
2680 DCHECK_EQ(object.code(), InstanceofStub::left().code());
2681 DCHECK_EQ(function.code(), InstanceofStub::right().code());
2683 // Get the object and function - they are always both needed.
2684 // Go slow case if the object is a smi.
2686 StackArgumentsAccessor args(rsp, 2 + extra_argument_offset,
2687 ARGUMENTS_DONT_CONTAIN_RECEIVER);
2688 if (!HasArgsInRegisters()) {
2689 __ movp(object, args.GetArgumentOperand(0));
2690 __ movp(function, args.GetArgumentOperand(1));
2692 __ JumpIfSmi(object, &slow);
2694 // Check that the left hand is a JS object. Leave its map in rax.
2695 __ CmpObjectType(object, FIRST_SPEC_OBJECT_TYPE, map);
2697 __ CmpInstanceType(map, LAST_SPEC_OBJECT_TYPE);
2700 // If there is a call site cache don't look in the global cache, but do the
2701 // real lookup and update the call site cache.
2702 if (!HasCallSiteInlineCheck() && !ReturnTrueFalseObject()) {
2703 // Look up the function and the map in the instanceof cache.
2705 __ CompareRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2706 __ j(not_equal, &miss, Label::kNear);
2707 __ CompareRoot(map, Heap::kInstanceofCacheMapRootIndex);
2708 __ j(not_equal, &miss, Label::kNear);
2709 __ LoadRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2710 __ ret((HasArgsInRegisters() ? 0 : 2) * kPointerSize);
2714 // Get the prototype of the function.
2715 __ TryGetFunctionPrototype(function, prototype, &slow, true);
2717 // Check that the function prototype is a JS object.
2718 __ JumpIfSmi(prototype, &slow);
2719 __ CmpObjectType(prototype, FIRST_SPEC_OBJECT_TYPE, kScratchRegister);
2721 __ CmpInstanceType(kScratchRegister, LAST_SPEC_OBJECT_TYPE);
2724 // Update the global instanceof or call site inlined cache with the current
2725 // map and function. The cached answer will be set when it is known below.
2726 if (!HasCallSiteInlineCheck()) {
2727 __ StoreRoot(function, Heap::kInstanceofCacheFunctionRootIndex);
2728 __ StoreRoot(map, Heap::kInstanceofCacheMapRootIndex);
2730 // The constants for the code patching are based on push instructions
2731 // at the call site.
2732 DCHECK(!HasArgsInRegisters());
2733 // Get return address and delta to inlined map check.
2734 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2735 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2736 if (FLAG_debug_code) {
2737 __ movl(scratch, Immediate(kWordBeforeMapCheckValue));
2738 __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), scratch);
2739 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheCheck);
2741 __ movp(kScratchRegister,
2742 Operand(kScratchRegister, kOffsetToMapCheckValue));
2743 __ movp(Operand(kScratchRegister, 0), map);
2746 // Loop through the prototype chain looking for the function prototype.
2747 __ movp(scratch, FieldOperand(map, Map::kPrototypeOffset));
2748 Label loop, is_instance, is_not_instance;
2749 __ LoadRoot(kScratchRegister, Heap::kNullValueRootIndex);
2751 __ cmpp(scratch, prototype);
2752 __ j(equal, &is_instance, Label::kNear);
2753 __ cmpp(scratch, kScratchRegister);
2754 // The code at is_not_instance assumes that kScratchRegister contains a
2755 // non-zero GCable value (the null object in this case).
2756 __ j(equal, &is_not_instance, Label::kNear);
2757 __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2758 __ movp(scratch, FieldOperand(scratch, Map::kPrototypeOffset));
2761 __ bind(&is_instance);
2762 if (!HasCallSiteInlineCheck()) {
2764 // Store bitwise zero in the cache. This is a Smi in GC terms.
2765 STATIC_ASSERT(kSmiTag == 0);
2766 __ StoreRoot(rax, Heap::kInstanceofCacheAnswerRootIndex);
2767 if (ReturnTrueFalseObject()) {
2768 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2771 // Store offset of true in the root array at the inline check site.
2772 int true_offset = 0x100 +
2773 (Heap::kTrueValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2774 // Assert it is a 1-byte signed value.
2775 DCHECK(true_offset >= 0 && true_offset < 0x100);
2776 __ movl(rax, Immediate(true_offset));
2777 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2778 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2779 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2780 if (FLAG_debug_code) {
2781 __ movl(rax, Immediate(kWordBeforeResultValue));
2782 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2783 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2785 if (!ReturnTrueFalseObject()) {
2789 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2792 __ bind(&is_not_instance);
2793 if (!HasCallSiteInlineCheck()) {
2794 // We have to store a non-zero value in the cache.
2795 __ StoreRoot(kScratchRegister, Heap::kInstanceofCacheAnswerRootIndex);
2796 if (ReturnTrueFalseObject()) {
2797 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2800 // Store offset of false in the root array at the inline check site.
2801 int false_offset = 0x100 +
2802 (Heap::kFalseValueRootIndex << kPointerSizeLog2) - kRootRegisterBias;
2803 // Assert it is a 1-byte signed value.
2804 DCHECK(false_offset >= 0 && false_offset < 0x100);
2805 __ movl(rax, Immediate(false_offset));
2806 __ movq(kScratchRegister, StackOperandForReturnAddress(0));
2807 __ subp(kScratchRegister, args.GetArgumentOperand(2));
2808 __ movb(Operand(kScratchRegister, kOffsetToResultValue), rax);
2809 if (FLAG_debug_code) {
2810 __ movl(rax, Immediate(kWordBeforeResultValue));
2811 __ cmpl(Operand(kScratchRegister, kOffsetToResultValue - 4), rax);
2812 __ Assert(equal, kInstanceofStubUnexpectedCallSiteCacheMov);
2815 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2818 // Slow-case: Go through the JavaScript implementation.
2820 if (!ReturnTrueFalseObject()) {
2821 // Tail call the builtin which returns 0 or 1.
2822 DCHECK(!HasArgsInRegisters());
2823 if (HasCallSiteInlineCheck()) {
2824 // Remove extra value from the stack.
2825 __ PopReturnAddressTo(rcx);
2827 __ PushReturnAddressFrom(rcx);
2829 __ InvokeBuiltin(Builtins::INSTANCE_OF, JUMP_FUNCTION);
2831 // Call the builtin and convert 0/1 to true/false.
2833 FrameScope scope(masm, StackFrame::INTERNAL);
2836 __ InvokeBuiltin(Builtins::INSTANCE_OF, CALL_FUNCTION);
2838 Label true_value, done;
2840 __ j(zero, &true_value, Label::kNear);
2841 __ LoadRoot(rax, Heap::kFalseValueRootIndex);
2842 __ jmp(&done, Label::kNear);
2843 __ bind(&true_value);
2844 __ LoadRoot(rax, Heap::kTrueValueRootIndex);
2846 __ ret(((HasArgsInRegisters() ? 0 : 2) + extra_argument_offset) *
2852 // -------------------------------------------------------------------------
2853 // StringCharCodeAtGenerator
2855 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
2856 // If the receiver is a smi trigger the non-string case.
2857 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
2858 __ JumpIfSmi(object_, receiver_not_string_);
2860 // Fetch the instance type of the receiver into result register.
2861 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2862 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2863 // If the receiver is not a string trigger the non-string case.
2864 __ testb(result_, Immediate(kIsNotStringMask));
2865 __ j(not_zero, receiver_not_string_);
2868 // If the index is non-smi trigger the non-smi case.
2869 __ JumpIfNotSmi(index_, &index_not_smi_);
2870 __ bind(&got_smi_index_);
2872 // Check for index out of range.
2873 __ SmiCompare(index_, FieldOperand(object_, String::kLengthOffset));
2874 __ j(above_equal, index_out_of_range_);
2876 __ SmiToInteger32(index_, index_);
2878 StringCharLoadGenerator::Generate(
2879 masm, object_, index_, result_, &call_runtime_);
2881 __ Integer32ToSmi(result_, result_);
2886 void StringCharCodeAtGenerator::GenerateSlow(
2887 MacroAssembler* masm,
2888 const RuntimeCallHelper& call_helper) {
2889 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
2891 Factory* factory = masm->isolate()->factory();
2892 // Index is not a smi.
2893 __ bind(&index_not_smi_);
2894 // If index is a heap number, try converting it to an integer.
2896 factory->heap_number_map(),
2899 call_helper.BeforeCall(masm);
2901 __ Push(index_); // Consumed by runtime conversion function.
2902 if (index_flags_ == STRING_INDEX_IS_NUMBER) {
2903 __ CallRuntime(Runtime::kNumberToIntegerMapMinusZero, 1);
2905 DCHECK(index_flags_ == STRING_INDEX_IS_ARRAY_INDEX);
2906 // NumberToSmi discards numbers that are not exact integers.
2907 __ CallRuntime(Runtime::kNumberToSmi, 1);
2909 if (!index_.is(rax)) {
2910 // Save the conversion result before the pop instructions below
2911 // have a chance to overwrite it.
2912 __ movp(index_, rax);
2915 // Reload the instance type.
2916 __ movp(result_, FieldOperand(object_, HeapObject::kMapOffset));
2917 __ movzxbl(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
2918 call_helper.AfterCall(masm);
2919 // If index is still not a smi, it must be out of range.
2920 __ JumpIfNotSmi(index_, index_out_of_range_);
2921 // Otherwise, return to the fast path.
2922 __ jmp(&got_smi_index_);
2924 // Call runtime. We get here when the receiver is a string and the
2925 // index is a number, but the code for getting the actual character
2926 // is too complex (e.g., when the string needs to be flattened).
2927 __ bind(&call_runtime_);
2928 call_helper.BeforeCall(masm);
2930 __ Integer32ToSmi(index_, index_);
2932 __ CallRuntime(Runtime::kStringCharCodeAtRT, 2);
2933 if (!result_.is(rax)) {
2934 __ movp(result_, rax);
2936 call_helper.AfterCall(masm);
2939 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
2943 // -------------------------------------------------------------------------
2944 // StringCharFromCodeGenerator
2946 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
2947 // Fast case of Heap::LookupSingleCharacterStringFromCode.
2948 __ JumpIfNotSmi(code_, &slow_case_);
2949 __ SmiCompare(code_, Smi::FromInt(String::kMaxOneByteCharCode));
2950 __ j(above, &slow_case_);
2952 __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
2953 SmiIndex index = masm->SmiToIndex(kScratchRegister, code_, kPointerSizeLog2);
2954 __ movp(result_, FieldOperand(result_, index.reg, index.scale,
2955 FixedArray::kHeaderSize));
2956 __ CompareRoot(result_, Heap::kUndefinedValueRootIndex);
2957 __ j(equal, &slow_case_);
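// An undefined cache entry means the single-character string has not been
// materialized yet; defer to the runtime in that case.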
2962 void StringCharFromCodeGenerator::GenerateSlow(
2963 MacroAssembler* masm,
2964 const RuntimeCallHelper& call_helper) {
2965 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
2967 __ bind(&slow_case_);
2968 call_helper.BeforeCall(masm);
2970 __ CallRuntime(Runtime::kCharFromCode, 1);
2971 if (!result_.is(rax)) {
2972 __ movp(result_, rax);
2974 call_helper.AfterCall(masm);
2977 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
2981 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
2985 String::Encoding encoding) {
2986 // Nothing to do for zero characters.
2988 __ testl(count, count);
2989 __ j(zero, &done, Label::kNear);
2991 // Make count the number of bytes to copy.
2992 if (encoding == String::TWO_BYTE_ENCODING) {
2993 STATIC_ASSERT(2 == sizeof(uc16));
2994 __ addl(count, count);
2997 // Copy remaining characters.
3000 __ movb(kScratchRegister, Operand(src, 0));
3001 __ movb(Operand(dest, 0), kScratchRegister);
3005 __ j(not_zero, &loop);
3011 void SubStringStub::Generate(MacroAssembler* masm) {
3014 // Stack frame on entry.
3015 // rsp[0] : return address
3020 enum SubStringStubArgumentIndices {
3021 STRING_ARGUMENT_INDEX,
3022 FROM_ARGUMENT_INDEX,
3024 SUB_STRING_ARGUMENT_COUNT
3027 StackArgumentsAccessor args(rsp, SUB_STRING_ARGUMENT_COUNT,
3028 ARGUMENTS_DONT_CONTAIN_RECEIVER);
3030 // Make sure first argument is a string.
3031 __ movp(rax, args.GetArgumentOperand(STRING_ARGUMENT_INDEX));
3032 STATIC_ASSERT(kSmiTag == 0);
3033 __ testl(rax, Immediate(kSmiTagMask));
3034 __ j(zero, &runtime);
3035 Condition is_string = masm->IsObjectStringType(rax, rbx, rbx);
3036 __ j(NegateCondition(is_string), &runtime);
3039 // rbx: instance type
3040 // Calculate length of sub string using the smi values.
3041 __ movp(rcx, args.GetArgumentOperand(TO_ARGUMENT_INDEX));
3042 __ movp(rdx, args.GetArgumentOperand(FROM_ARGUMENT_INDEX));
3043 __ JumpUnlessBothNonNegativeSmi(rcx, rdx, &runtime);
3045 __ SmiSub(rcx, rcx, rdx); // Overflow doesn't happen.
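// rcx = to - from, i.e. the substring length; both indices were verified
// to be non-negative smis above.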
3046 __ cmpp(rcx, FieldOperand(rax, String::kLengthOffset));
3047 Label not_original_string;
3048 // Shorter than original string's length: an actual substring.
3049 __ j(below, &not_original_string, Label::kNear);
3050 // Longer than original string's length or negative: unsafe arguments.
3051 __ j(above, &runtime);
3052 // Return original string.
3053 Counters* counters = isolate()->counters();
3054 __ IncrementCounter(counters->sub_string_native(), 1);
3055 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3056 __ bind(&not_original_string);
3059 __ SmiCompare(rcx, Smi::FromInt(1));
3060 __ j(equal, &single_char);
3062 __ SmiToInteger32(rcx, rcx);
3065 // rbx: instance type
3066 // rcx: sub string length
3067 // rdx: from index (smi)
3068 // Deal with different string types: update the index if necessary
3069 // and put the underlying string into rdi.
3070 Label underlying_unpacked, sliced_string, seq_or_external_string;
3071 // If the string is not indirect, it can only be sequential or external.
3072 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3073 STATIC_ASSERT(kIsIndirectStringMask != 0);
3074 __ testb(rbx, Immediate(kIsIndirectStringMask));
3075 __ j(zero, &seq_or_external_string, Label::kNear);
3077 __ testb(rbx, Immediate(kSlicedNotConsMask));
3078 __ j(not_zero, &sliced_string, Label::kNear);
3079 // Cons string. Check whether it is flat, then fetch first part.
3080 // Flat cons strings have an empty second part.
3081 __ CompareRoot(FieldOperand(rax, ConsString::kSecondOffset),
3082 Heap::kempty_stringRootIndex);
3083 __ j(not_equal, &runtime);
3084 __ movp(rdi, FieldOperand(rax, ConsString::kFirstOffset));
3085 // Update instance type.
3086 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
3087 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3088 __ jmp(&underlying_unpacked, Label::kNear);
3090 __ bind(&sliced_string);
3091 // Sliced string. Fetch parent and correct start index by offset.
3092 __ addp(rdx, FieldOperand(rax, SlicedString::kOffsetOffset));
3093 __ movp(rdi, FieldOperand(rax, SlicedString::kParentOffset));
3094 // Update instance type.
3095 __ movp(rbx, FieldOperand(rdi, HeapObject::kMapOffset));
3096 __ movzxbl(rbx, FieldOperand(rbx, Map::kInstanceTypeOffset));
3097 __ jmp(&underlying_unpacked, Label::kNear);
3099 __ bind(&seq_or_external_string);
3100 // Sequential or external string. Just move string to the correct register.
3103 __ bind(&underlying_unpacked);
3105 if (FLAG_string_slices) {
3107 // rdi: underlying subject string
3108 // rbx: instance type of underlying subject string
3109 // rdx: adjusted start index (smi)
3111 // If coming from the make_two_character_string path, the string
3112 // is too short to be sliced anyway.
3113 __ cmpp(rcx, Immediate(SlicedString::kMinLength));
3114 // Short slice. Copy instead of slicing.
3115 __ j(less, &copy_routine);
3116 // Allocate new sliced string. At this point we do not reload the instance
3117 // type including the string encoding because we simply rely on the info
3118 // provided by the original string. It does not matter if the original
3119 // string's encoding is wrong because we always have to recheck encoding of
3120 // the newly created string's parent anyway due to externalized strings.
3121 Label two_byte_slice, set_slice_header;
3122 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3123 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3124 __ testb(rbx, Immediate(kStringEncodingMask));
3125 __ j(zero, &two_byte_slice, Label::kNear);
3126 __ AllocateOneByteSlicedString(rax, rbx, r14, &runtime);
3127 __ jmp(&set_slice_header, Label::kNear);
3128 __ bind(&two_byte_slice);
3129 __ AllocateTwoByteSlicedString(rax, rbx, r14, &runtime);
3130 __ bind(&set_slice_header);
3131 __ Integer32ToSmi(rcx, rcx);
3132 __ movp(FieldOperand(rax, SlicedString::kLengthOffset), rcx);
3133 __ movp(FieldOperand(rax, SlicedString::kHashFieldOffset),
3134 Immediate(String::kEmptyHashField));
3135 __ movp(FieldOperand(rax, SlicedString::kParentOffset), rdi);
3136 __ movp(FieldOperand(rax, SlicedString::kOffsetOffset), rdx);
3137 __ IncrementCounter(counters->sub_string_native(), 1);
3138 __ ret(3 * kPointerSize);
3140 __ bind(&copy_routine);
3143 // rdi: underlying subject string
3144 // rbx: instance type of underlying subject string
3145 // rdx: adjusted start index (smi)
3147 // The subject string can only be an external or sequential string of either
3148 // encoding at this point.
3149 Label two_byte_sequential, sequential_string;
3150 STATIC_ASSERT(kExternalStringTag != 0);
3151 STATIC_ASSERT(kSeqStringTag == 0);
3152 __ testb(rbx, Immediate(kExternalStringTag));
3153 __ j(zero, &sequential_string);
3155 // Handle external string.
3156 // Rule out short external strings.
3157 STATIC_ASSERT(kShortExternalStringTag != 0);
3158 __ testb(rbx, Immediate(kShortExternalStringMask));
3159 __ j(not_zero, &runtime);
3160 __ movp(rdi, FieldOperand(rdi, ExternalString::kResourceDataOffset));
3161 // Move the pointer so that offset-wise, it looks like a sequential string.
3162 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
3163 __ subp(rdi, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3165 __ bind(&sequential_string);
3166 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3167 __ testb(rbx, Immediate(kStringEncodingMask));
3168 __ j(zero, &two_byte_sequential);
3170 // Allocate the result.
3171 __ AllocateOneByteString(rax, rcx, r11, r14, r15, &runtime);
3173 // rax: result string
3174 // rcx: result string length
3175 { // Locate character of sub string start.
3176 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_1);
3177 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3178 SeqOneByteString::kHeaderSize - kHeapObjectTag));
3180 // Locate first character of result.
3181 __ leap(rdi, FieldOperand(rax, SeqOneByteString::kHeaderSize));
3183 // rax: result string
3184 // rcx: result length
3185 // rdi: first character of result
3186 // r14: character of sub string start
3187 StringHelper::GenerateCopyCharacters(
3188 masm, rdi, r14, rcx, String::ONE_BYTE_ENCODING);
3189 __ IncrementCounter(counters->sub_string_native(), 1);
3190 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3192 __ bind(&two_byte_sequential);
3193 // Allocate the result.
3194 __ AllocateTwoByteString(rax, rcx, r11, r14, r15, &runtime);
3196 // rax: result string
3197 // rcx: result string length
3198 { // Locate character of sub string start.
3199 SmiIndex smi_as_index = masm->SmiToIndex(rdx, rdx, times_2);
3200 __ leap(r14, Operand(rdi, smi_as_index.reg, smi_as_index.scale,
3201 SeqOneByteString::kHeaderSize - kHeapObjectTag));
3203 // Locate first character of result.
3204 __ leap(rdi, FieldOperand(rax, SeqTwoByteString::kHeaderSize));
3206 // rax: result string
3207 // rcx: result length
3208 // rdi: first character of result
3209 // r14: character of sub string start
3210 StringHelper::GenerateCopyCharacters(
3211 masm, rdi, r14, rcx, String::TWO_BYTE_ENCODING);
3212 __ IncrementCounter(counters->sub_string_native(), 1);
3213 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3215 // Just jump to runtime to create the sub string.
3217 __ TailCallRuntime(Runtime::kSubString, 3, 1);
3219 __ bind(&single_char);
3221 // rbx: instance type
3222 // rcx: sub string length (smi)
3223 // rdx: from index (smi)
3224 StringCharAtGenerator generator(rax, rdx, rcx, rax, &runtime, &runtime,
3225 &runtime, STRING_INDEX_IS_NUMBER,
3226 RECEIVER_IS_STRING);
3227 generator.GenerateFast(masm);
3228 __ ret(SUB_STRING_ARGUMENT_COUNT * kPointerSize);
3229 generator.SkipSlow(masm, &runtime);
3233 void ToNumberStub::Generate(MacroAssembler* masm) {
3234 // The ToNumber stub takes one argument in rax.
3236 __ JumpIfNotSmi(rax, &not_smi, Label::kNear);
3240 Label not_heap_number;
3241 __ CompareRoot(FieldOperand(rax, HeapObject::kMapOffset),
3242 Heap::kHeapNumberMapRootIndex);
3243 __ j(not_equal, &not_heap_number, Label::kNear);
3245 __ bind(&not_heap_number);
3247 Label not_string, slow_string;
3248 __ CmpObjectType(rax, FIRST_NONSTRING_TYPE, rdi);
3251 __ j(above_equal, &not_string, Label::kNear);
3252 // Check if string has a cached array index.
3253 __ testl(FieldOperand(rax, String::kHashFieldOffset),
3254 Immediate(String::kContainsCachedArrayIndexMask));
3255 __ j(not_zero, &slow_string, Label::kNear);
3256 __ movl(rax, FieldOperand(rax, String::kHashFieldOffset));
3257 __ IndexFromHash(rax, rax);
3259 __ bind(&slow_string);
3260 __ PopReturnAddressTo(rcx); // Pop return address.
3261 __ Push(rax); // Push argument.
3262 __ PushReturnAddressFrom(rcx); // Push return address.
3263 __ TailCallRuntime(Runtime::kStringToNumber, 1, 1);
3264 __ bind(&not_string);
3267 __ CmpInstanceType(rdi, ODDBALL_TYPE);
3268 __ j(not_equal, &not_oddball, Label::kNear);
3269 __ movp(rax, FieldOperand(rax, Oddball::kToNumberOffset));
3271 __ bind(&not_oddball);
3273 __ PopReturnAddressTo(rcx); // Pop return address.
3274 __ Push(rax); // Push argument.
3275 __ PushReturnAddressFrom(rcx); // Push return address.
3276 __ InvokeBuiltin(Builtins::TO_NUMBER, JUMP_FUNCTION);
void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
                                                   Register left,
                                                   Register right,
                                                   Register scratch1,
                                                   Register scratch2) {
  Register length = scratch1;

  // Compare lengths.
  Label check_zero_length;
  __ movp(length, FieldOperand(left, String::kLengthOffset));
  __ SmiCompare(length, FieldOperand(right, String::kLengthOffset));
  __ j(equal, &check_zero_length, Label::kNear);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);

  // Check if the length is zero.
  Label compare_chars;
  __ bind(&check_zero_length);
  STATIC_ASSERT(kSmiTag == 0);
  __ SmiTest(length);
  __ j(not_zero, &compare_chars, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Compare characters.
  __ bind(&compare_chars);
  Label strings_not_equal;
  GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
                                  &strings_not_equal, Label::kNear);

  // Characters are equal.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Characters are not equal.
  __ bind(&strings_not_equal);
  __ Move(rax, Smi::FromInt(NOT_EQUAL));
  __ ret(0);
}


void StringHelper::GenerateCompareFlatOneByteStrings(
    MacroAssembler* masm, Register left, Register right, Register scratch1,
    Register scratch2, Register scratch3, Register scratch4) {
  // Ensure that you can always subtract a string length from a non-negative
  // number (e.g. another length).
  STATIC_ASSERT(String::kMaxLength < 0x7fffffff);

  // Find minimum length and length difference.
  __ movp(scratch1, FieldOperand(left, String::kLengthOffset));
  __ movp(scratch4, scratch1);
  __ SmiSub(scratch4,
            scratch4,
            FieldOperand(right, String::kLengthOffset));
  // Register scratch4 now holds left.length - right.length.
  const Register length_difference = scratch4;
  Label left_shorter;
  __ j(less, &left_shorter, Label::kNear);
  // The right string isn't longer than the left one.
  // Get the right string's length by subtracting the (non-negative) difference
  // from the left string's length.
  __ SmiSub(scratch1, scratch1, length_difference);
  __ bind(&left_shorter);
  // Register scratch1 now holds Min(left.length, right.length).
  const Register min_length = scratch1;

  Label compare_lengths;
  // If min-length is zero, go directly to comparing lengths.
  __ SmiTest(min_length);
  __ j(zero, &compare_lengths, Label::kNear);

  // Compare loop.
  Label result_not_equal;
  GenerateOneByteCharsCompareLoop(
      masm, left, right, min_length, scratch2, &result_not_equal,
      // In debug-code mode, SmiTest below might push
      // the target label outside the near range.
      Label::kFar);

  // Completed loop without finding different characters.
  // Compare lengths (precomputed).
  __ bind(&compare_lengths);
  __ SmiTest(length_difference);
  Label length_not_equal;
  __ j(not_zero, &length_not_equal, Label::kNear);

  // Result is EQUAL.
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  Label result_greater;
  Label result_less;
  __ bind(&length_not_equal);
  __ j(greater, &result_greater, Label::kNear);
  __ jmp(&result_less, Label::kNear);
  __ bind(&result_not_equal);
  // Unequal comparison of left to right, either character or length.
  __ j(above, &result_greater, Label::kNear);
  __ bind(&result_less);

  // Result is LESS.
  __ Move(rax, Smi::FromInt(LESS));
  __ ret(0);

  // Result is GREATER.
  __ bind(&result_greater);
  __ Move(rax, Smi::FromInt(GREATER));
  __ ret(0);
}


void StringHelper::GenerateOneByteCharsCompareLoop(
    MacroAssembler* masm, Register left, Register right, Register length,
    Register scratch, Label* chars_not_equal, Label::Distance near_jump) {
  // Change index to run from -length to -1 by adding length to string
  // start. This means that loop ends when index reaches zero, which
  // doesn't need an additional compare.
  __ SmiToInteger32(length, length);
  __ leap(left,
          FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
  __ leap(right,
          FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
  __ negq(length);
  Register index = length;  // index = -length;

  // Compare loop.
  Label loop;
  __ bind(&loop);
  __ movb(scratch, Operand(left, index, times_1, 0));
  __ cmpb(scratch, Operand(right, index, times_1, 0));
  __ j(not_equal, chars_not_equal, near_jump);
  __ incq(index);
  __ j(not_zero, &loop);
}
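
// The loop above uses a standard trick: both string pointers are advanced to
// one-past-the-end, and the index counts up from -length to 0, so the loop
// exit test reuses the flags of the index increment instead of needing a
// separate bounds compare. Roughly, in C (illustrative only):
//
//   const uint8_t* l = left_chars + length;
//   const uint8_t* r = right_chars + length;
//   for (intptr_t i = -length; i != 0; i++) {
//     if (l[i] != r[i]) goto chars_not_equal;
//   }
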
void StringCompareStub::Generate(MacroAssembler* masm) {
  Label runtime;

  // Stack frame on entry.
  //  rsp[0]  : return address
  //  rsp[8]  : right string
  //  rsp[16] : left string

  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(0));  // left
  __ movp(rax, args.GetArgumentOperand(1));  // right

  // Check for identity.
  Label not_same;
  __ cmpp(rdx, rax);
  __ j(not_equal, &not_same, Label::kNear);
  __ Move(rax, Smi::FromInt(EQUAL));
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->string_compare_native(), 1);
  __ ret(2 * kPointerSize);

  __ bind(&not_same);

  // Check that both are sequential one-byte strings.
  __ JumpIfNotBothSequentialOneByteStrings(rdx, rax, rcx, rbx, &runtime);

  // Inline comparison of one-byte strings.
  __ IncrementCounter(counters->string_compare_native(), 1);
  // Drop arguments from the stack.
  __ PopReturnAddressTo(rcx);
  __ addp(rsp, Immediate(2 * kPointerSize));
  __ PushReturnAddressFrom(rcx);
  StringHelper::GenerateCompareFlatOneByteStrings(masm, rdx, rax, rcx, rbx, rdi,
                                                  r8);

  // Call the runtime; it returns -1 (less), 0 (equal), or 1 (greater)
  // tagged as a small integer.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
}


void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rdx    : left
  //  -- rax    : right
  //  -- rsp[0] : return address
  // -----------------------------------

  // Load rcx with the allocation site. We stick an undefined dummy value here
  // and replace it with the real allocation site later when we instantiate
  // this stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
  __ Move(rcx, handle(isolate()->heap()->undefined_value()));

  // Make sure that we actually patched the allocation site.
  if (FLAG_debug_code) {
    __ testb(rcx, Immediate(kSmiTagMask));
    __ Assert(not_equal, kExpectedAllocationSite);
    __ Cmp(FieldOperand(rcx, HeapObject::kMapOffset),
           isolate()->factory()->allocation_site_map());
    __ Assert(equal, kExpectedAllocationSite);
  }

  // Tail call into the stub that handles binary operations with allocation
  // sites.
  BinaryOpWithAllocationSiteStub stub(isolate(), state());
  __ TailCallStub(&stub);
}


void CompareICStub::GenerateSmis(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::SMI);
  Label miss;
  __ JumpIfNotBothSmi(rdx, rax, &miss, Label::kNear);

  if (GetCondition() == equal) {
    // For equality we do not care about the sign of the result.
    __ subp(rax, rdx);
  } else {
    Label done;
    __ subp(rdx, rax);
    __ j(no_overflow, &done, Label::kNear);
    // Correct sign of result in case of overflow.
    __ notp(rdx);
    __ bind(&done);
    __ movp(rax, rdx);
  }
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
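
// Why the notp above is enough: for ordered comparisons rdx - rax can
// overflow (e.g. a very negative Smi minus a positive one), leaving a result
// with the wrong sign. Flipping all bits negates the sign bit while keeping
// the value non-zero, and callers of this IC only inspect the sign and
// zeroness of rax, so a one's-complement correction is sufficient.
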
void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::NUMBER);

  Label generic_stub;
  Label unordered, maybe_undefined1, maybe_undefined2;
  Label miss;

  if (left() == CompareICState::SMI) {
    __ JumpIfNotSmi(rdx, &miss);
  }
  if (right() == CompareICState::SMI) {
    __ JumpIfNotSmi(rax, &miss);
  }

  // Load left and right operand.
  Label done, left, left_smi, right_smi;
  __ JumpIfSmi(rax, &right_smi, Label::kNear);
  __ CompareMap(rax, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined1, Label::kNear);
  __ movsd(xmm1, FieldOperand(rax, HeapNumber::kValueOffset));
  __ jmp(&left, Label::kNear);
  __ bind(&right_smi);
  __ SmiToInteger32(rcx, rax);  // Can't clobber rax yet.
  __ Cvtlsi2sd(xmm1, rcx);

  __ bind(&left);
  __ JumpIfSmi(rdx, &left_smi, Label::kNear);
  __ CompareMap(rdx, isolate()->factory()->heap_number_map());
  __ j(not_equal, &maybe_undefined2, Label::kNear);
  __ movsd(xmm0, FieldOperand(rdx, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);
  __ bind(&left_smi);
  __ SmiToInteger32(rcx, rdx);  // Can't clobber rdx yet.
  __ Cvtlsi2sd(xmm0, rcx);

  __ bind(&done);
  // Compare operands.
  __ ucomisd(xmm0, xmm1);

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  // Return a result of -1, 0, or 1, based on EFLAGS.
  // Use mov, because xor would destroy the flag register.
  __ movl(rax, Immediate(0));
  __ movl(rcx, Immediate(0));
  __ setcc(above, rax);  // Add one to zero if carry clear and not equal.
  __ sbbp(rax, rcx);     // Subtract one if below (aka. carry set).
  __ ret(0);
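
  // How the branchless sequence above produces -1, 0 or +1 from the ucomisd
  // flags: "above" (CF == 0, ZF == 0) makes setcc store 1 and sbb subtracts
  // nothing, giving 1; "below" sets CF, so 0 - 0 - CF yields -1; "equal"
  // leaves both at 0. NaN operands were already routed to &unordered via PF.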

  __ bind(&unordered);
  __ bind(&generic_stub);
  CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
                     CompareICState::GENERIC, CompareICState::GENERIC);
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);

  __ bind(&maybe_undefined1);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rax, isolate()->factory()->undefined_value());
    __ j(not_equal, &miss);
    __ JumpIfSmi(rdx, &unordered);
    __ CmpObjectType(rdx, HEAP_NUMBER_TYPE, rcx);
    __ j(not_equal, &maybe_undefined2, Label::kNear);
    __ jmp(&unordered);
  }

  __ bind(&maybe_undefined2);
  if (Token::IsOrderedRelationalCompareOp(op())) {
    __ Cmp(rdx, isolate()->factory()->undefined_value());
    __ j(equal, &unordered);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::INTERNALIZED_STRING);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are internalized strings.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ orp(tmp1, tmp2);
  __ testb(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, &miss, Label::kNear);

  // Internalized strings are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}
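
// Note the identity-compare trick above: when the operands differ, rax still
// holds the right operand, a tagged heap pointer that is guaranteed non-zero,
// so it can be returned directly as a "not equal" result; only the equal case
// must load Smi 0 (EQUAL) explicitly. GenerateUniqueNames and GenerateStrings
// below reuse the same trick.
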
void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::UNIQUE_NAME);
  DCHECK(GetCondition() == equal);

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;

  // Check that both operands are heap objects.
  Label miss;
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss, Label::kNear);

  // Check that both operands are unique names. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));

  __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
  __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);

  // Unique names are compared by identity.
  Label done;
  __ cmpp(left, right);
  // Make sure rax is non-zero. At this point input operands are
  // guaranteed to be non-zero.
  DCHECK(right.is(rax));
  __ j(not_equal, &done, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ bind(&done);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateStrings(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::STRING);
  Label miss;

  bool equality = Token::IsEqualityOp(op());

  // Registers containing left and right operands respectively.
  Register left = rdx;
  Register right = rax;
  Register tmp1 = rcx;
  Register tmp2 = rbx;
  Register tmp3 = rdi;

  // Check that both operands are heap objects.
  Condition cond = masm->CheckEitherSmi(left, right, tmp1);
  __ j(cond, &miss);

  // Check that both operands are strings. This leaves the instance
  // types loaded in tmp1 and tmp2.
  __ movp(tmp1, FieldOperand(left, HeapObject::kMapOffset));
  __ movp(tmp2, FieldOperand(right, HeapObject::kMapOffset));
  __ movzxbp(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
  __ movzxbp(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
  __ movp(tmp3, tmp1);
  STATIC_ASSERT(kNotStringTag != 0);
  __ orp(tmp3, tmp2);
  __ testb(tmp3, Immediate(kIsNotStringMask));
  __ j(not_zero, &miss);

  // Fast check for identical strings.
  Label not_same;
  __ cmpp(left, right);
  __ j(not_equal, &not_same, Label::kNear);
  STATIC_ASSERT(EQUAL == 0);
  STATIC_ASSERT(kSmiTag == 0);
  __ Move(rax, Smi::FromInt(EQUAL));
  __ ret(0);

  // Handle not identical strings.
  __ bind(&not_same);

  // Check that both strings are internalized strings. If they are, we're done
  // because we already know they are not identical. We also know they are both
  // strings.
  if (equality) {
    Label do_compare;
    STATIC_ASSERT(kInternalizedTag == 0);
    __ orp(tmp1, tmp2);
    __ testb(tmp1, Immediate(kIsNotInternalizedMask));
    __ j(not_zero, &do_compare, Label::kNear);
    // Make sure rax is non-zero. At this point input operands are
    // guaranteed to be non-zero.
    DCHECK(right.is(rax));
    __ ret(0);
    __ bind(&do_compare);
  }

  // Check that both strings are sequential one-byte.
  Label runtime;
  __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);

  // Compare flat one-byte strings. Returns when done.
  if (equality) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
                                                  tmp2);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(
        masm, left, right, tmp1, tmp2, tmp3, kScratchRegister);
  }

  // Handle more complex cases in runtime.
  __ bind(&runtime);
  __ PopReturnAddressTo(tmp1);
  __ Push(left);
  __ Push(right);
  __ PushReturnAddressFrom(tmp1);
  if (equality) {
    __ TailCallRuntime(Runtime::kStringEquals, 2, 1);
  } else {
    __ TailCallRuntime(Runtime::kStringCompare, 2, 1);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateObjects(MacroAssembler* masm) {
  DCHECK(state() == CompareICState::OBJECT);
  Label miss;
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ CmpObjectType(rax, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);
  __ CmpObjectType(rdx, JS_OBJECT_TYPE, rcx);
  __ j(not_equal, &miss, Label::kNear);

  DCHECK(GetCondition() == equal);
  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateKnownObjects(MacroAssembler* masm) {
  Label miss;
  Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
  Condition either_smi = masm->CheckEitherSmi(rdx, rax);
  __ j(either_smi, &miss, Label::kNear);

  __ GetWeakValue(rdi, cell);
  __ movp(rcx, FieldOperand(rax, HeapObject::kMapOffset));
  __ movp(rbx, FieldOperand(rdx, HeapObject::kMapOffset));
  __ cmpp(rcx, rdi);
  __ j(not_equal, &miss, Label::kNear);
  __ cmpp(rbx, rdi);
  __ j(not_equal, &miss, Label::kNear);

  __ subp(rax, rdx);
  __ ret(0);

  __ bind(&miss);
  GenerateMiss(masm);
}


void CompareICStub::GenerateMiss(MacroAssembler* masm) {
  {
    // Call the runtime system in a fresh internal frame.
    ExternalReference miss =
        ExternalReference(IC_Utility(IC::kCompareIC_Miss), isolate());

    FrameScope scope(masm, StackFrame::INTERNAL);
    __ Push(rdx);
    __ Push(rax);
    __ Push(rdx);
    __ Push(rax);
    __ Push(Smi::FromInt(op()));
    __ CallExternalReference(miss, 3);

    // Compute the entry point of the rewritten stub.
    __ leap(rdi, FieldOperand(rax, Code::kHeaderSize));
    __ Pop(rax);
    __ Pop(rdx);
  }

  // Do a tail call to the rewritten stub.
  __ jmp(rdi);
}


void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register properties,
                                                      Handle<Name> name,
                                                      Register r0) {
  DCHECK(name->IsUniqueName());
  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  for (int i = 0; i < kInlinedProbes; i++) {
    // r0 points to properties hash.
    // Compute the masked index: (hash + i + i * i) & mask.
    Register index = r0;
    // Capacity is smi 2^n.
    __ SmiToInteger32(index, FieldOperand(properties, kCapacityOffset));
    __ decl(index);
    __ andp(index,
            Immediate(name->Hash() + NameDictionary::GetProbeOffset(i)));

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(index, Operand(index, index, times_2, 0));  // index *= 3.

    Register entity_name = r0;
    // Having undefined at this place means the name is not contained.
    DCHECK_EQ(kSmiTagSize, 1);
    __ movp(entity_name, Operand(properties,
                                 index,
                                 times_pointer_size,
                                 kElementsStartOffset - kHeapObjectTag));
    __ Cmp(entity_name, masm->isolate()->factory()->undefined_value());
    __ j(equal, done);

    // Stop if found the property.
    __ Cmp(entity_name, Handle<Name>(name));
    __ j(equal, miss);

    Label good;
    // Check for the hole and skip.
    __ CompareRoot(entity_name, Heap::kTheHoleValueRootIndex);
    __ j(equal, &good, Label::kNear);

    // Check if the entry name is not a unique name.
    __ movp(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
    __ JumpIfNotUniqueNameInstanceType(
        FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
    __ bind(&good);
  }

  NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
                                NEGATIVE_LOOKUP);
  __ Push(Handle<Object>(name));
  __ Push(Immediate(name->Hash()));
  __ CallStub(&stub);
  __ testp(r0, r0);
  __ j(not_zero, miss);
  __ jmp(done);
}
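
// A sketch of the probe sequence the code above unrolls (illustrative only;
// the helper names are descriptive, not a real API):
//
//   for (int i = 0; i < kInlinedProbes; i++) {
//     int index = (hash + NameDictionary::GetProbeOffset(i)) & (capacity - 1);
//     Object* key = elements[index * NameDictionary::kEntrySize];
//     if (key == undefined) return kAbsent;  // free slot ends the probe chain
//     if (key == name) return kPresent;
//   }
//   // Fall back to the stub, which continues the same sequence up to
//   // kTotalProbes.
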
// Probe the name dictionary in the |elements| register. Jump to the
// |done| label if a property with the given name is found, leaving the
// index into the dictionary in |r1|. Jump to the |miss| label
// otherwise.
void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
                                                      Label* miss,
                                                      Label* done,
                                                      Register elements,
                                                      Register name,
                                                      Register r0,
                                                      Register r1) {
  DCHECK(!elements.is(r0));
  DCHECK(!elements.is(r1));
  DCHECK(!name.is(r0));
  DCHECK(!name.is(r1));

  __ AssertName(name);

  __ SmiToInteger32(r0, FieldOperand(elements, kCapacityOffset));
  __ decl(r0);

  for (int i = 0; i < kInlinedProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movl(r1, FieldOperand(name, Name::kHashFieldOffset));
    __ shrl(r1, Immediate(Name::kHashShift));
    if (i > 0) {
      __ addl(r1, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(r1, r0);

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(r1, Operand(r1, r1, times_2, 0));  // r1 = r1 * 3

    // Check if the key is identical to the name.
    __ cmpp(name, Operand(elements, r1, times_pointer_size,
                          kElementsStartOffset - kHeapObjectTag));
    __ j(equal, done);
  }

  NameDictionaryLookupStub stub(masm->isolate(), elements, r0, r1,
                                POSITIVE_LOOKUP);
  __ Push(name);
  __ movl(r0, FieldOperand(name, Name::kHashFieldOffset));
  __ shrl(r0, Immediate(Name::kHashShift));
  __ Push(r0);
  __ CallStub(&stub);

  __ testp(r0, r0);
  __ j(zero, miss);
  __ jmp(done);
}


void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
  // This stub overrides SometimesSetsUpAFrame() to return false. That means
  // we cannot call anything that could cause a GC from this stub.
  // Stack frame on entry:
  //  rsp[0 * kPointerSize] : return address.
  //  rsp[1 * kPointerSize] : key's hash.
  //  rsp[2 * kPointerSize] : key.
  // Registers:
  //  dictionary_: NameDictionary to probe.
  //  result_: used as scratch.
  //  index_: will hold an index of entry if lookup is successful.
  //          might alias with result_.
  // Returns:
  //  result_ is zero if lookup failed, non zero otherwise.

  Label in_dictionary, maybe_in_dictionary, not_in_dictionary;

  Register scratch = result();

  __ SmiToInteger32(scratch, FieldOperand(dictionary(), kCapacityOffset));
  __ decl(scratch);
  __ Push(scratch);

  // If names of slots in range from 1 to kProbes - 1 for the hash value are
  // not equal to the name and kProbes-th slot is not used (its name is the
  // undefined value), it guarantees the hash table doesn't contain the
  // property. It's true even if some slots represent deleted properties
  // (their names are the hole value).
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER,
                              kPointerSize);
  for (int i = kInlinedProbes; i < kTotalProbes; i++) {
    // Compute the masked index: (hash + i + i * i) & mask.
    __ movp(scratch, args.GetArgumentOperand(1));
    if (i > 0) {
      __ addl(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
    }
    __ andp(scratch, Operand(rsp, 0));

    // Scale the index by multiplying by the entry size.
    DCHECK(NameDictionary::kEntrySize == 3);
    __ leap(index(), Operand(scratch, scratch, times_2, 0));  // index *= 3.

    // Having undefined at this place means the name is not contained.
    __ movp(scratch, Operand(dictionary(), index(), times_pointer_size,
                             kElementsStartOffset - kHeapObjectTag));

    __ Cmp(scratch, isolate()->factory()->undefined_value());
    __ j(equal, &not_in_dictionary);

    // Stop if found the property.
    __ cmpp(scratch, args.GetArgumentOperand(0));
    __ j(equal, &in_dictionary);

    if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
      // If we hit a key that is not a unique name during negative
      // lookup we have to bail out as this key might be equal to the
      // key we are looking for.

      // Check if the entry name is not a unique name.
      __ movp(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
      __ JumpIfNotUniqueNameInstanceType(
          FieldOperand(scratch, Map::kInstanceTypeOffset),
          &maybe_in_dictionary);
    }
  }

  __ bind(&maybe_in_dictionary);
  // If we are doing negative lookup then probing failure should be
  // treated as a lookup success. For positive lookup probing failure
  // should be treated as lookup failure.
  if (mode() == POSITIVE_LOOKUP) {
    __ movp(scratch, Immediate(0));
    __ Drop(1);
    __ ret(2 * kPointerSize);
  }

  __ bind(&in_dictionary);
  __ movp(scratch, Immediate(1));
  __ Drop(1);
  __ ret(2 * kPointerSize);

  __ bind(&not_in_dictionary);
  __ movp(scratch, Immediate(0));
  __ Drop(1);
  __ ret(2 * kPointerSize);
}


void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
    Isolate* isolate) {
  StoreBufferOverflowStub stub1(isolate, kDontSaveFPRegs);
  stub1.GetCode();
  StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
  stub2.GetCode();
}


// Takes the input in 3 registers: address_ value_ and object_. A pointer to
// the value has just been written into the object, now this stub makes sure
// we keep the GC informed. The word in the object where the value has been
// written is in the address register.
void RecordWriteStub::Generate(MacroAssembler* masm) {
  Label skip_to_incremental_noncompacting;
  Label skip_to_incremental_compacting;

  // The first two instructions are generated with labels so as to get the
  // offset fixed up correctly by the bind(Label*) call. We patch it back and
  // forth between a compare instruction (a nop in this position) and the
  // real branch when we start and stop incremental heap marking.
  // See RecordWriteStub::Patch for details.
  __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
  __ jmp(&skip_to_incremental_compacting, Label::kFar);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&skip_to_incremental_noncompacting);
  GenerateIncremental(masm, INCREMENTAL);

  __ bind(&skip_to_incremental_compacting);
  GenerateIncremental(masm, INCREMENTAL_COMPACTION);

  // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
  // Will be checked in IncrementalMarking::ActivateGeneratedStub.
  masm->set_byte_at(0, kTwoByteNopInstruction);
  masm->set_byte_at(2, kFiveByteNopInstruction);
}
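
// The patchable prologue works by rewriting opcode bytes only: byte 0 toggles
// between a short-jump opcode and a "compare AL with imm8" opcode, and byte 2
// between a near-jump opcode and a "compare EAX with imm32" opcode (hence the
// kTwoByteNop/kFiveByteNop names: the compares are harmless in this
// position). The jump displacements double as the compare immediates, so the
// instruction stream stays valid in all three marking modes.
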
void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
  regs_.Save(masm);

  if (remembered_set_action() == EMIT_REMEMBERED_SET) {
    Label dont_need_remembered_set;

    __ movp(regs_.scratch0(), Operand(regs_.address(), 0));
    __ JumpIfNotInNewSpace(regs_.scratch0(),
                           regs_.scratch0(),
                           &dont_need_remembered_set);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch0(),
                     1 << MemoryChunk::SCAN_ON_SCAVENGE,
                     not_zero,
                     &dont_need_remembered_set);

    // First notify the incremental marker if necessary, then update the
    // remembered set.
    CheckNeedsToInformIncrementalMarker(
        masm, kUpdateRememberedSetOnNoNeedToInformIncrementalMarker, mode);
    InformIncrementalMarker(masm);
    regs_.Restore(masm);
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);

    __ bind(&dont_need_remembered_set);
  }

  CheckNeedsToInformIncrementalMarker(
      masm, kReturnOnNoNeedToInformIncrementalMarker, mode);
  InformIncrementalMarker(masm);
  regs_.Restore(masm);
  __ ret(0);
}


void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
  regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
  Register address =
      arg_reg_1.is(regs_.address()) ? kScratchRegister : regs_.address();
  DCHECK(!address.is(regs_.object()));
  DCHECK(!address.is(arg_reg_1));
  __ Move(address, regs_.address());
  __ Move(arg_reg_1, regs_.object());
  // TODO(gc) Can we just set address arg2 in the beginning?
  __ Move(arg_reg_2, address);
  __ LoadAddress(arg_reg_3,
                 ExternalReference::isolate_address(isolate()));
  int argument_count = 3;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count);
  __ CallCFunction(
      ExternalReference::incremental_marking_record_write_function(isolate()),
      argument_count);
  regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
}


void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
    MacroAssembler* masm,
    OnNoNeedToInformIncrementalMarker on_no_need,
    Mode mode) {
  Label on_black;
  Label need_incremental;
  Label need_incremental_pop_object;

  __ movp(regs_.scratch0(), Immediate(~Page::kPageAlignmentMask));
  __ andp(regs_.scratch0(), regs_.object());
  __ movp(regs_.scratch1(),
          Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset));
  __ subp(regs_.scratch1(), Immediate(1));
  __ movp(Operand(regs_.scratch0(),
                  MemoryChunk::kWriteBarrierCounterOffset),
          regs_.scratch1());
  __ j(negative, &need_incremental);

  // Let's look at the color of the object: If it is not black we don't have
  // to inform the incremental marker.
  __ JumpIfBlack(regs_.object(),
                 regs_.scratch0(),
                 regs_.scratch1(),
                 &on_black,
                 Label::kNear);

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&on_black);

  // Get the value from the slot.
  __ movp(regs_.scratch0(), Operand(regs_.address(), 0));

  if (mode == INCREMENTAL_COMPACTION) {
    Label ensure_not_white;

    __ CheckPageFlag(regs_.scratch0(),  // Contains value.
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kEvacuationCandidateMask,
                     zero,
                     &ensure_not_white,
                     Label::kNear);

    __ CheckPageFlag(regs_.object(),
                     regs_.scratch1(),  // Scratch.
                     MemoryChunk::kSkipEvacuationSlotsRecordingMask,
                     zero,
                     &need_incremental);

    __ bind(&ensure_not_white);
  }

  // We need an extra register for this, so we push the object register
  // temporarily.
  __ Push(regs_.object());
  __ EnsureNotWhite(regs_.scratch0(),  // The value.
                    regs_.scratch1(),  // Scratch.
                    regs_.object(),    // Scratch.
                    &need_incremental_pop_object,
                    Label::kNear);
  __ Pop(regs_.object());

  regs_.Restore(masm);
  if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
    __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
                           MacroAssembler::kReturnAtEnd);
  } else {
    __ ret(0);
  }

  __ bind(&need_incremental_pop_object);
  __ Pop(regs_.object());

  __ bind(&need_incremental);

  // Fall through when we need to inform the incremental marker.
}
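
// The per-page write barrier counter decremented above appears to act as a
// throttle: most barrier hits take only the cheap color and page-flag checks,
// but once the counter underflows the stub falls through to need_incremental
// and the marker is informed regardless, bounding how much barrier activity
// can accumulate between visits to the incremental marker.
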
void StoreArrayLiteralElementStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax     : element value to store
  //  -- rcx     : element index as smi
  //  -- rsp[0]  : return address
  //  -- rsp[8]  : array literal index in function
  //  -- rsp[16] : array literal
  // clobbers rbx, rdx, rdi
  // -----------------------------------

  Label element_done;
  Label double_elements;
  Label smi_element;
  Label slow_elements;
  Label fast_elements;

  // Get array literal index, array literal and its map.
  StackArgumentsAccessor args(rsp, 2, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rdx, args.GetArgumentOperand(1));
  __ movp(rbx, args.GetArgumentOperand(0));
  __ movp(rdi, FieldOperand(rbx, JSObject::kMapOffset));

  __ CheckFastElements(rdi, &double_elements);

  // FAST_*_SMI_ELEMENTS or FAST_*_ELEMENTS
  __ JumpIfSmi(rax, &smi_element);
  __ CheckFastSmiElements(rdi, &fast_elements);

  // Storing into the array literal requires an elements transition. Call into
  // the runtime.

  __ bind(&slow_elements);
  __ PopReturnAddressTo(rdi);
  __ Push(rbx);
  __ Push(rcx);
  __ Push(rax);
  __ movp(rbx, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
  __ Push(FieldOperand(rbx, JSFunction::kLiteralsOffset));
  __ Push(rdx);
  __ PushReturnAddressFrom(rdi);
  __ TailCallRuntime(Runtime::kStoreArrayLiteralElement, 5, 1);

  // Array literal has ElementsKind of FAST_*_ELEMENTS and value is an object.
  __ bind(&fast_elements);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ leap(rcx, FieldOperand(rbx, kScratchRegister, times_pointer_size,
                            FixedArrayBase::kHeaderSize));
  __ movp(Operand(rcx, 0), rax);
  // Update the write barrier for the array store.
  __ RecordWrite(rbx, rcx, rax,
                 kDontSaveFPRegs,
                 EMIT_REMEMBERED_SET,
                 OMIT_SMI_CHECK);
  __ ret(0);

  // Array literal has ElementsKind of FAST_*_SMI_ELEMENTS or
  // FAST_*_ELEMENTS, and value is Smi.
  __ bind(&smi_element);
  __ SmiToInteger32(kScratchRegister, rcx);
  __ movp(rbx, FieldOperand(rbx, JSObject::kElementsOffset));
  __ movp(FieldOperand(rbx, kScratchRegister, times_pointer_size,
                       FixedArrayBase::kHeaderSize), rax);
  __ ret(0);

  // Array literal has ElementsKind of FAST_DOUBLE_ELEMENTS.
  __ bind(&double_elements);

  __ movp(r9, FieldOperand(rbx, JSObject::kElementsOffset));
  __ SmiToInteger32(r11, rcx);
  __ StoreNumberToDoubleElements(rax,
                                 r9,
                                 r11,
                                 xmm0,
                                 &slow_elements);
  __ ret(0);
}


void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
  CEntryStub ces(isolate(), 1, kSaveFPRegs);
  __ Call(ces.GetCode(), RelocInfo::CODE_TARGET);
  int parameter_count_offset =
      StubFailureTrampolineFrame::kCallerStackParameterCountFrameOffset;
  __ movp(rbx, MemOperand(rbp, parameter_count_offset));
  masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
  __ PopReturnAddressTo(rcx);
  int additional_offset =
      function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
  __ leap(rsp, MemOperand(rsp, rbx, times_pointer_size, additional_offset));
  __ jmp(rcx);  // Return to IC Miss stub, continuation still on stack.
}
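
// On exit rbx holds the caller's stack parameter count, so the leap computes
// rsp + rbx * kPointerSize, plus one extra slot for the receiver when the
// stub was compiled in JS_FUNCTION_STUB_MODE; this drops all stack arguments
// before jumping through the previously popped return address in rcx.
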
void LoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorLoadStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void KeyedLoadICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, VectorLoadICDescriptor::VectorRegister());
  VectorKeyedLoadStub stub(isolate());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void CallICTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallICStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void CallIC_ArrayTrampolineStub::Generate(MacroAssembler* masm) {
  EmitLoadTypeFeedbackVector(masm, rbx);
  CallIC_ArrayStub stub(isolate(), state());
  __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
}


void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != NULL) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}


void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // This stub can be called from essentially anywhere, so it needs to save
  // all volatile and callee-save registers.
  const size_t kNumSavedRegisters = 2;
  __ pushq(arg_reg_1);
  __ pushq(arg_reg_2);

  // Calculate the original stack pointer and store it in the second arg.
  __ leap(arg_reg_2,
          Operand(rsp, kNumSavedRegisters * kRegisterSize + kPCOnStackSize));

  // Calculate the function address and store it in the first arg.
  __ movp(arg_reg_1, Operand(rsp, kNumSavedRegisters * kRegisterSize));
  __ subp(arg_reg_1, Immediate(Assembler::kShortCallInstructionLength));

  // Save the remainder of the volatile registers.
  masm->PushCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Call the entry hook function.
  __ Move(rax, FUNCTION_ADDR(isolate()->function_entry_hook()),
          Assembler::RelocInfoNone());

  AllowExternalCallThatCantCauseGC scope(masm);

  const int kArgumentCount = 2;
  __ PrepareCallCFunction(kArgumentCount);
  __ CallCFunction(rax, kArgumentCount);

  // Restore volatile regs.
  masm->PopCallerSaved(kSaveFPRegs, arg_reg_1, arg_reg_2);

  // Restore the saved argument registers and return.
  __ popq(arg_reg_2);
  __ popq(arg_reg_1);
  __ Ret();
}
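
// The entry hook receives (function_address, original_rsp). The function
// address is recovered from this stub's own return address: the stub is
// reached via a short call placed at the instrumented function's entry, so
// subtracting Assembler::kShortCallInstructionLength from the return address
// yields the address of that call, i.e. (approximately) the function entry.
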
template<class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}


static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // rbx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // rdx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // rax - number of arguments
  // rdi - constructor?
  // rsp[0] - return address
  // rsp[8] - last argument
  Handle<Object> undefined_sentinel(
      masm->isolate()->heap()->undefined_value(),
      masm->isolate());

  Label normal_sequence;
  if (mode == DONT_OVERRIDE) {
    DCHECK(FAST_SMI_ELEMENTS == 0);
    DCHECK(FAST_HOLEY_SMI_ELEMENTS == 1);
    DCHECK(FAST_ELEMENTS == 2);
    DCHECK(FAST_HOLEY_ELEMENTS == 3);
    DCHECK(FAST_DOUBLE_ELEMENTS == 4);
    DCHECK(FAST_HOLEY_DOUBLE_ELEMENTS == 5);

    // Is the low bit set? If so, we are holey and that is good.
    __ testb(rdx, Immediate(1));
    __ j(not_zero, &normal_sequence);
  }

  // Look at the first argument.
  StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
  __ movp(rcx, args.GetArgumentOperand(0));
  __ testp(rcx, rcx);
  __ j(zero, &normal_sequence);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(masm->isolate(),
                                                  holey_initial,
                                                  DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);

    __ bind(&normal_sequence);
    ArraySingleArgumentConstructorStub stub(masm->isolate(),
                                            initial,
                                            DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry (only if we have an allocation site in the slot).
    __ incl(rdx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ Cmp(FieldOperand(rbx, 0), allocation_site_map);
      __ Assert(equal, kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store r3
    // in the AllocationSite::transition_info field because elements kind is
    // restricted to a portion of the field...upper bits need to be left alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ SmiAddConstant(FieldOperand(rbx, AllocationSite::kTransitionInfoOffset),
                      Smi::FromInt(kFastElementsKindPackedToHoley));

    __ bind(&normal_sequence);
    int last_index = GetSequenceIndexFromFastElementsKind(
        TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmpl(rdx, Immediate(kind));
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
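
// The packed-to-holey fixup above relies on the kind numbering asserted by
// the DCHECKs: each packed fast kind is even and its holey variant is the
// next odd value. Bumping rdx with incl and adding the Smi-tagged
// kFastElementsKindPackedToHoley delta to the transition info therefore
// switches to the holey variant without disturbing the upper bits of the
// AllocationSite field.
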
template<class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index = GetSequenceIndexFromFastElementsKind(
      TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}


void ArrayConstructorStubBase::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArrayNArgumentsConstructorStub>(
      isolate);
}


void InternalArrayConstructorStubBase::GenerateStubsAheadOfTime(
    Isolate* isolate) {
  ElementsKind kinds[2] = { FAST_ELEMENTS, FAST_HOLEY_ELEMENTS };
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things.
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
    InternalArrayNArgumentsConstructorStub stubh3(isolate, kinds[i]);
    stubh3.GetCode();
  }
}


void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm,
    AllocationSiteOverrideMode mode) {
  if (argument_count() == ANY) {
    Label not_zero_case, not_one_case;
    __ testp(rax, rax);
    __ j(not_zero, &not_zero_case);
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

    __ bind(&not_zero_case);
    __ cmpl(rax, Immediate(1));
    __ j(greater, &not_one_case);
    CreateArrayDispatchOneArgument(masm, mode);

    __ bind(&not_one_case);
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else if (argument_count() == NONE) {
    CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
  } else if (argument_count() == ONE) {
    CreateArrayDispatchOneArgument(masm, mode);
  } else if (argument_count() == MORE_THAN_ONE) {
    CreateArrayDispatch<ArrayNArgumentsConstructorStub>(masm, mode);
  } else {
    UNREACHABLE();
  }
}


void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rbx    : AllocationSite or undefined
  //  -- rdi    : constructor
  //  -- rdx    : original constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------
  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in rbx or a valid AllocationSite.
    __ AssertUndefinedOrAllocationSite(rbx);
  }

  Label subclassing;
  __ cmpp(rdi, rdx);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ movp(rdx, FieldOperand(rbx, AllocationSite::kTransitionInfoOffset));
  __ SmiToInteger32(rdx, rdx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ andp(rdx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  __ bind(&subclassing);
  __ Pop(rcx);  // return address.
  __ Push(rdi);
  __ Push(rdx);

  // Adjust argc.
  switch (argument_count()) {
    case ANY:
    case MORE_THAN_ONE:
      __ addp(rax, Immediate(2));
      break;
    case NONE:
      __ movp(rax, Immediate(2));
      break;
    case ONE:
      __ movp(rax, Immediate(3));
      break;
  }

  __ Push(rcx);
  __ JumpToExternalReference(
      ExternalReference(Runtime::kArrayConstructorWithSubclassing, isolate()),
      1);
}


void InternalArrayConstructorStub::GenerateCase(
    MacroAssembler* masm, ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ testp(rax, rax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmpl(rax, Immediate(1));
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array;
    // look at the first argument.
    StackArgumentsAccessor args(rsp, 1, ARGUMENTS_DONT_CONTAIN_RECEIVER);
    __ movp(rcx, args.GetArgumentOperand(0));
    __ testp(rcx, rcx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub
        stub1_holey(isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  InternalArrayNArgumentsConstructorStub stubN(isolate(), kind);
  __ TailCallStub(&stubN);
}


void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rax    : argc
  //  -- rdi    : constructor
  //  -- rsp[0] : return address
  //  -- rsp[8] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a NULL and a Smi.
    STATIC_ASSERT(kSmiTag == 0);
    Condition not_smi = NegateCondition(masm->CheckSmi(rcx));
    __ Check(not_smi, kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(rcx, MAP_TYPE, rcx);
    __ Check(equal, kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind.
  __ movp(rcx, FieldOperand(rdi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into rcx. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ movzxbp(rcx, FieldOperand(rcx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(rcx);

  if (FLAG_debug_code) {
    Label done;
    __ cmpl(rcx, Immediate(FAST_ELEMENTS));
    __ j(equal, &done);
    __ cmpl(rcx, Immediate(FAST_HOLEY_ELEMENTS));
    __ Assert(equal,
              kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmpl(rcx, Immediate(FAST_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, FAST_HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, FAST_ELEMENTS);
}


static int Offset(ExternalReference ref0, ExternalReference ref1) {
  int64_t offset = (ref0.address() - ref1.address());
  // Check that the offset fits into an int.
  DCHECK(static_cast<int>(offset) == offset);
  return static_cast<int>(offset);
}


// Prepares stack to put arguments (aligns and so on). WIN64 calling
// convention requires putting the pointer to the return value slot into
// rcx (rcx must be preserved until CallApiFunctionAndReturn). Saves
// context (rsi). Clobbers rax. Allocates arg_stack_space * kPointerSize
// inside the exit frame (not GCed) accessible via StackSpaceOperand.
static void PrepareCallApiFunction(MacroAssembler* masm, int arg_stack_space) {
  __ EnterApiExitFrame(arg_stack_space);
}


// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers r14, r15, rbx and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Register thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand,
                                     Operand* context_restore_operand) {
  Label prologue;
  Label promote_scheduled_exception;
  Label exception_handled;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  Isolate* isolate = masm->isolate();
  Factory* factory = isolate->factory();
  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  const int kNextOffset = 0;
  const int kLimitOffset = Offset(
      ExternalReference::handle_scope_limit_address(isolate), next_address);
  const int kLevelOffset = Offset(
      ExternalReference::handle_scope_level_address(isolate), next_address);
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);

  DCHECK(rdx.is(function_address) || r8.is(function_address));
  // Allocate HandleScope in callee-save registers.
  Register prev_next_address_reg = r14;
  Register prev_limit_reg = rbx;
  Register base_reg = r15;
  __ Move(base_reg, next_address);
  __ movp(prev_next_address_reg, Operand(base_reg, kNextOffset));
  __ movp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ addl(Operand(base_reg, kLevelOffset), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ Move(rax, ExternalReference::is_profiling_address(isolate));
  __ cmpb(Operand(rax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Third parameter is the address of the actual getter function.
  __ Move(thunk_last_arg, function_address);
  __ Move(rax, thunk_ref);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Profiling is disabled: call the api function directly.
  __ Move(rax, function_address);

  __ bind(&end_profiler_check);

  // Call the api function!
  __ call(rax);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1);
    __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  // Load the value from ReturnValue.
  __ movp(rax, return_value_operand);
  __ bind(&prologue);

  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ subl(Operand(base_reg, kLevelOffset), Immediate(1));
  __ movp(Operand(base_reg, kNextOffset), prev_next_address_reg);
  __ cmpp(prev_limit_reg, Operand(base_reg, kLimitOffset));
  __ j(not_equal, &delete_allocated_handles);
  __ bind(&leave_exit_frame);

  // Check if the function scheduled an exception.
  __ Move(rsi, scheduled_exception_address);
  __ Cmp(Operand(rsi, 0), factory->the_hole_value());
  __ j(not_equal, &promote_scheduled_exception);
  __ bind(&exception_handled);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = rax;
  Register map = rcx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ movp(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ CompareRoot(map, Heap::kHeapNumberMapRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kUndefinedValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kTrueValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kFalseValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ CompareRoot(return_value, Heap::kNullValueRootIndex);
  __ j(equal, &ok, Label::kNear);

  __ Abort(kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  bool restore_context = context_restore_operand != NULL;
  if (restore_context) {
    __ movp(rsi, *context_restore_operand);
  }
  if (stack_space_operand != nullptr) {
    __ movp(rbx, *stack_space_operand);
  }
  __ LeaveApiExitFrame(!restore_context);
  if (stack_space_operand != nullptr) {
    DCHECK_EQ(stack_space, 0);
    __ PopReturnAddressTo(rcx);
    __ addq(rsp, rbx);
    __ jmp(rcx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  __ bind(&promote_scheduled_exception);
  {
    FrameScope frame(masm, StackFrame::INTERNAL);
    __ CallRuntime(Runtime::kPromoteScheduledException, 0);
  }
  __ jmp(&exception_handled);

  // HandleScope limit has changed. Delete allocated extensions.
  __ bind(&delete_allocated_handles);
  __ movp(Operand(base_reg, kLimitOffset), prev_limit_reg);
  __ movp(prev_limit_reg, rax);
  __ LoadAddress(arg_reg_1, ExternalReference::isolate_address(isolate));
  __ LoadAddress(rax,
                 ExternalReference::delete_handle_scope_extensions(isolate));
  __ call(rax);
  __ movp(rax, prev_limit_reg);
  __ jmp(&leave_exit_frame);
}
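
// The HandleScope bookkeeping in CallApiFunctionAndReturn operates on three
// isolate-global fields addressed from one base pointer (r15): next, limit
// and level, at the constant offsets computed via Offset() above. Entry saves
// next/limit and increments level; exit restores them. If the callback grew
// the scope past the saved limit, scope extensions were heap-allocated and
// must be freed -- that is the delete_allocated_handles path, which preserves
// the callback's result in prev_limit_reg around the C call.
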
static void CallApiFunctionStubHelper(MacroAssembler* masm,
                                      const ParameterCount& argc,
                                      bool return_first_arg,
                                      bool call_data_undefined) {
  // ----------- S t a t e -------------
  //  -- rdi                 : callee
  //  -- rbx                 : call_data
  //  -- rcx                 : holder
  //  -- rdx                 : api_function_address
  //  -- rsi                 : context
  //  -- rax                 : number of arguments if argc is a register
  //  -- rsp[0]              : return address
  //  -- rsp[8]              : last argument
  //  -- ...
  //  -- rsp[argc * 8]       : first argument
  //  -- rsp[(argc + 1) * 8] : receiver
  // -----------------------------------

  Register callee = rdi;
  Register call_data = rbx;
  Register holder = rcx;
  Register api_function_address = rdx;
  Register context = rsi;
  Register return_address = r8;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kContextSaveIndex == 6);
  STATIC_ASSERT(FCA::kCalleeIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);
  STATIC_ASSERT(FCA::kArgsLength == 7);

  DCHECK(argc.is_immediate() || rax.is(argc.reg()));

  __ PopReturnAddressTo(return_address);

  // context save
  __ Push(context);

  // callee
  __ Push(callee);

  // call data
  __ Push(call_data);
  Register scratch = call_data;
  if (!call_data_undefined) {
    __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex);
  }
  // return value
  __ Push(scratch);
  // return value default
  __ Push(scratch);
  // isolate
  __ Move(scratch, ExternalReference::isolate_address(masm->isolate()));
  __ Push(scratch);
  // holder
  __ Push(holder);

  __ movp(scratch, rsp);
  // Push return address back on stack.
  __ PushReturnAddressFrom(return_address);

  // load context from callee
  __ movp(context, FieldOperand(callee, JSFunction::kContextOffset));

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 4;

  PrepareCallApiFunction(masm, kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ movp(StackSpaceOperand(0), scratch);
  if (argc.is_immediate()) {
    __ addp(scratch, Immediate((argc.immediate() + FCA::kArgsLength - 1) *
                               kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ Set(StackSpaceOperand(2), argc.immediate());
    // FunctionCallbackInfo::is_construct_call_.
    __ Set(StackSpaceOperand(3), 0);
  } else {
    __ leap(scratch, Operand(scratch, argc.reg(), times_pointer_size,
                             (FCA::kArgsLength - 1) * kPointerSize));
    // FunctionCallbackInfo::values_.
    __ movp(StackSpaceOperand(1), scratch);
    // FunctionCallbackInfo::length_.
    __ movp(StackSpaceOperand(2), argc.reg());
    // FunctionCallbackInfo::is_construct_call_.
    __ leap(argc.reg(), Operand(argc.reg(), times_pointer_size,
                                (FCA::kArgsLength + 1) * kPointerSize));
    __ movp(StackSpaceOperand(3), argc.reg());
  }

#if defined(__MINGW64__) || defined(_WIN64)
  Register arguments_arg = rcx;
  Register callback_arg = rdx;
#else
  Register arguments_arg = rdi;
  Register callback_arg = rsi;
#endif

  // It's okay if api_function_address == callback_arg,
  // but not arguments_arg.
  DCHECK(!api_function_address.is(arguments_arg));

  // v8::InvocationCallback's argument.
  __ leap(arguments_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Accessor for FunctionCallbackInfo and first js arg.
  StackArgumentsAccessor args_from_rbp(rbp, FCA::kArgsLength + 1,
                                       ARGUMENTS_DONT_CONTAIN_RECEIVER);
  Operand context_restore_operand = args_from_rbp.GetArgumentOperand(
      FCA::kArgsLength - FCA::kContextSaveIndex);
  Operand is_construct_call_operand = StackSpaceOperand(3);
  Operand return_value_operand = args_from_rbp.GetArgumentOperand(
      return_first_arg ? 0 : FCA::kArgsLength - FCA::kReturnValueOffset);
  int stack_space = 0;
  Operand* stack_space_operand = &is_construct_call_operand;
  if (argc.is_immediate()) {
    stack_space = argc.immediate() + FCA::kArgsLength + 1;
    stack_space_operand = nullptr;
  }
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, callback_arg,
                           stack_space, stack_space_operand,
                           return_value_operand, &context_restore_operand);
}
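
// A sketch of the stack as seen by the callback after the pushes above (slot
// indices relative to scratch, which points at the holder; one slot is
// kPointerSize):
//
//   scratch + 0 : holder                 (FCA::kHolderIndex == 0)
//   scratch + 1 : isolate
//   scratch + 2 : return value default
//   scratch + 3 : return value
//   scratch + 4 : call data
//   scratch + 5 : callee
//   scratch + 6 : context save           (FCA::kContextSaveIndex == 6)
//   scratch + 7 : last JS argument, ..., first JS argument, receiver
//
// FunctionCallbackInfo::implicit_args_ points at this block, and values_ at
// the first JS argument, i.e. scratch + (FCA::kArgsLength - 1 + argc) slots.
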
void CallApiFunctionStub::Generate(MacroAssembler* masm) {
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(rax), false,
                            call_data_undefined);
}


void CallApiAccessorStub::Generate(MacroAssembler* masm) {
  bool is_store = this->is_store();
  int argc = this->argc();
  bool call_data_undefined = this->call_data_undefined();
  CallApiFunctionStubHelper(masm, ParameterCount(argc), is_store,
                            call_data_undefined);
}


void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- rsp[0]                  : return address
  //  -- rsp[8]                  : name
  //  -- rsp[16 - kArgsLength*8] : PropertyCallbackArguments object
  //  -- ...
  //  -- r8                      : api_function_address
  // -----------------------------------

#if defined(__MINGW64__) || defined(_WIN64)
  Register getter_arg = r8;
  Register accessor_info_arg = rdx;
  Register name_arg = rcx;
#else
  Register getter_arg = rdx;
  Register accessor_info_arg = rsi;
  Register name_arg = rdi;
#endif
  Register api_function_address = ApiGetterDescriptor::function_address();
  DCHECK(api_function_address.is(r8));
  Register scratch = rax;

  // v8::Arguments::values_ and handler for name.
  const int kStackSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::AccessorInfo in non-GCed stack space.
  const int kArgStackSpace = 1;

  __ leap(name_arg, Operand(rsp, kPCOnStackSize));

  PrepareCallApiFunction(masm, kArgStackSpace);
  __ leap(scratch, Operand(name_arg, 1 * kPointerSize));

  // v8::PropertyAccessorInfo::args_.
  __ movp(StackSpaceOperand(0), scratch);

  // The context register (rsi) has been saved in PrepareCallApiFunction and
  // could be used to pass arguments.
  __ leap(accessor_info_arg, StackSpaceOperand(0));

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  // It's okay if api_function_address == getter_arg,
  // but not accessor_info_arg or name_arg.
  DCHECK(!api_function_address.is(accessor_info_arg) &&
         !api_function_address.is(name_arg));

  // The name handler is counted as an argument.
  StackArgumentsAccessor args(rbp, PropertyCallbackArguments::kArgsLength);
  Operand return_value_operand = args.GetArgumentOperand(
      PropertyCallbackArguments::kArgsLength - 1 -
      PropertyCallbackArguments::kReturnValueOffset);
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref, getter_arg,
                           kStackSpace, nullptr, return_value_operand, NULL);
}


#undef __

} }  // namespace v8::internal

#endif  // V8_TARGET_ARCH_X64