1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
9 #include "src/codegen.h"
10 #include "src/heap/heap.h"
11 #include "src/macro-assembler.h"
17 // -------------------------------------------------------------------------
18 // Platform-specific RuntimeCallHelper functions.
// Called before a stub makes a runtime call: materializes an INTERNAL frame
// and records on the assembler that a frame now exists.
20 void StubRuntimeCallHelper::BeforeCall(MacroAssembler* masm) const {
21 masm->EnterFrame(StackFrame::INTERNAL);
// NOTE(review): the DCHECK after EnterFrame implies EnterFrame itself does
// not set has_frame() — confirm against MacroAssembler.
22 DCHECK(!masm->has_frame());
23 masm->set_has_frame(true);
// Mirror of BeforeCall: tears down the INTERNAL frame after the runtime call
// and clears the assembler's has-frame flag.
27 void StubRuntimeCallHelper::AfterCall(MacroAssembler* masm) const {
28 masm->LeaveFrame(StackFrame::INTERNAL);
29 DCHECK(masm->has_frame());
30 masm->set_has_frame(false);
// Returns a UnaryMathFunction — presumably a generated exp() implementation;
// the body is elided from this extract, so behavior cannot be confirmed here.
37 UnaryMathFunction CreateExpFunction() {
// Returns a UnaryMathFunction — presumably a generated sqrt() implementation;
// the body is elided from this extract, so behavior cannot be confirmed here.
43 UnaryMathFunction CreateSqrtFunction() {
49 // Helper functions for CreateMemMoveFunction.
51 #define __ ACCESS_MASM(masm)
// Copy direction for the memmove code paths.
53 enum Direction { FORWARD, BACKWARD };
// Whether a copy loop may assume aligned accesses.
54 enum Alignment { MOVE_ALIGNED, MOVE_UNALIGNED };
// Shared epilogue for the generated memmove paths; given the name and the
// two register pushes accounted for by stack_offset in
// CreateMemMoveFunction, this presumably pops those registers and returns —
// body elided in this extract, confirm against the full file.
57 void MemMoveEmitPopAndReturn(MacroAssembler* masm) {
// Converts an assembler Label's buffer-relative position into an absolute
// 32-bit address inside a fixed (unmovable) code buffer. Only valid because
// the memmove buffer is never relocated (see CreateMemMoveFunction).
68 class LabelConverter {
70 explicit LabelConverter(byte* buffer) : buffer_(buffer) {}
// Absolute address of label l = buffer base + label position.
71 int32_t address(Label* l) const {
72 return reinterpret_cast<int32_t>(buffer_) + l->pos();
// Generates a specialized memmove(dst, src, size) in a freshly allocated
// executable buffer and returns it as a C-callable function pointer.
// Returns NULL if the executable buffer cannot be allocated.
// NOTE(review): many original lines are elided from this extract (e.g. the
// buffer declaration, the register pushes matching stack_offset, and the
// overlap-dispatch comparisons); comments below only describe visible code.
79 MemMoveFunction CreateMemMoveFunction() {
81 // Allocate buffer in executable space.
83 static_cast<byte*>(base::OS::Allocate(1 * KB, &actual_size, true));
84 if (buffer == NULL) return NULL;
85 MacroAssembler masm(NULL, buffer, static_cast<int>(actual_size));
86 LabelConverter conv(buffer);
88 // Generated code is put into a fixed, unmovable buffer, and not into
89 // the V8 heap. We can't, and don't, refer to any relocatable addresses
90 // (e.g. the JavaScript nan-object).
92 // 32-bit C declaration function calls pass arguments on stack.
95 // esp[12]: Third argument, size.
96 // esp[8]: Second argument, source pointer.
97 // esp[4]: First argument, destination pointer.
98 // esp[0]: return address
100 const int kDestinationOffset = 1 * kPointerSize;
101 const int kSourceOffset = 2 * kPointerSize;
102 const int kSizeOffset = 3 * kPointerSize;
104 int stack_offset = 0; // Update if we change the stack height.
106 Label backward, backward_much_overlap;
107 Label forward_much_overlap, small_size, medium_size, pop_and_return;
// Two registers were pushed (pushes elided here), shifting the incoming
// arguments by 2 * kPointerSize on the stack.
110 stack_offset += 2 * kPointerSize;
113 Register count = ecx;
// Load the three C arguments, adjusted for the pushed registers.
114 __ mov(dst, Operand(esp, stack_offset + kDestinationOffset));
115 __ mov(src, Operand(esp, stack_offset + kSourceOffset));
116 __ mov(count, Operand(esp, stack_offset + kSizeOffset));
// Early-outs (the compares feeding these jumps are elided): nothing to do,
// or dst == src.
119 __ j(equal, &pop_and_return);
124 __ j(equal, &pop_and_return);
// dst above src => copy backward to handle overlap safely.
126 __ j(above, &backward);
129 // Simple forward copier.
130 Label forward_loop_1byte, forward_loop_4byte;
// 4-bytes-at-a-time forward loop; falls back to byte copies for the tail.
131 __ bind(&forward_loop_4byte);
132 __ mov(eax, Operand(src, 0));
133 __ sub(count, Immediate(4));
134 __ add(src, Immediate(4));
135 __ mov(Operand(dst, 0), eax);
136 __ add(dst, Immediate(4));
137 __ bind(&forward); // Entry point.
139 __ j(above, &forward_loop_4byte);
140 __ bind(&forward_loop_1byte);
142 __ j(below_equal, &pop_and_return);
143 __ mov_b(eax, Operand(src, 0));
146 __ mov_b(Operand(dst, 0), eax);
148 __ jmp(&forward_loop_1byte);
151 // Simple backward copier.
152 Label backward_loop_1byte, backward_loop_4byte, entry_shortcut;
// Small counts skip straight to the byte loop.
157 __ j(below_equal, &entry_shortcut);
// 4-bytes-at-a-time backward loop: decrement pointers before each load/store.
159 __ bind(&backward_loop_4byte);
160 __ sub(src, Immediate(4));
161 __ sub(count, Immediate(4));
162 __ mov(eax, Operand(src, 0));
163 __ sub(dst, Immediate(4));
164 __ mov(Operand(dst, 0), eax);
166 __ j(above, &backward_loop_4byte);
167 __ bind(&backward_loop_1byte);
169 __ j(below_equal, &pop_and_return);
170 __ bind(&entry_shortcut);
173 __ mov_b(eax, Operand(src, 0));
175 __ mov_b(Operand(dst, 0), eax);
176 __ jmp(&backward_loop_1byte);
179 __ bind(&pop_and_return);
180 MemMoveEmitPopAndReturn(&masm);
// Finalize: code must be position-independent (no relocation entries),
// flush the instruction cache, and make the buffer execute-only.
184 DCHECK(!RelocInfo::RequiresRelocation(desc));
185 CpuFeatures::FlushICache(buffer, actual_size);
186 base::OS::ProtectCode(buffer, actual_size);
187 // TODO(jkummerow): It would be nice to register this code creation event
188 // with the PROFILE / GDBJIT system.
189 return FUNCTION_CAST<MemMoveFunction>(buffer);
195 // -------------------------------------------------------------------------
// Re-establish the ACCESS_MASM shorthand for the code generators below
// (the memmove section above undefined or shadowed it in the full file).
198 #define __ ACCESS_MASM(masm)
// Transitions a JSArray's elements kind when only the map needs to change
// (no backing-store conversion). Jumps to allocation_memento_found if the
// array carries an AllocationMemento and tracking is requested.
201 void ElementsTransitionGenerator::GenerateMapChangeElementsTransition(
202 MacroAssembler* masm,
207 AllocationSiteMode mode,
208 Label* allocation_memento_found) {
// edi is used as scratch; must not alias any of the incoming registers.
209 Register scratch = edi;
210 DCHECK(!AreAliased(receiver, key, value, target_map, scratch))
212 if (mode == TRACK_ALLOCATION_SITE) {
213 DCHECK(allocation_memento_found != NULL);
214 __ JumpIfJSArrayHasAllocationMemento(
215 receiver, scratch, allocation_memento_found);
218 // Set transitioned map.
219 __ mov(FieldOperand(receiver, HeapObject::kMapOffset), target_map);
// Map words are never smis, so the smi check can be omitted; the write
// barrier must still record the map store.
220 __ RecordWriteField(receiver, HeapObject::kMapOffset, target_map, scratch,
221 kDontSaveFPRegs, EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Converts a FixedArray of smis into a newly allocated FixedDoubleArray,
// installing it as the receiver's backing store and transitioning the map.
// Uses the x87 FPU (fild/fstp) for the smi->double conversion. Fixed register
// assignment: receiver=edx, value=eax, target_map=ebx (DCHECKed below).
// NOTE(review): several original lines are elided in this extract (receiver/
// key/value parameters, some loop scaffolding such as the smi-untag before
// fild_s, and the jump threading between loop/entry labels).
225 void ElementsTransitionGenerator::GenerateSmiToDouble(
226 MacroAssembler* masm,
231 AllocationSiteMode mode,
233 // Return address is on the stack.
234 DCHECK(receiver.is(edx));
236 DCHECK(value.is(eax));
237 DCHECK(target_map.is(ebx));
239 Label loop, entry, convert_hole, gc_required, only_change_map;
241 if (mode == TRACK_ALLOCATION_SITE) {
242 __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
245 // Check for empty arrays, which only require a map transition and no changes
246 // to the backing store.
247 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
248 __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
249 __ j(equal, &only_change_map);
254 __ mov(edi, FieldOperand(edi, FixedArray::kLengthOffset));
256 // Allocate new FixedDoubleArray.
258 // edi: length of source FixedArray (smi-tagged)
// Doubles need 8-byte alignment, hence DOUBLE_ALIGNMENT; length is still a
// smi, hence REGISTER_VALUE_IS_SMI with times_8 scaling.
259 AllocationFlags flags =
260 static_cast<AllocationFlags>(TAG_OBJECT | DOUBLE_ALIGNMENT);
261 __ Allocate(FixedDoubleArray::kHeaderSize, times_8, edi,
262 REGISTER_VALUE_IS_SMI, eax, ebx, no_reg, &gc_required, flags);
264 // eax: destination FixedDoubleArray
265 // edi: number of elements
267 __ mov(FieldOperand(eax, HeapObject::kMapOffset),
268 Immediate(masm->isolate()->factory()->fixed_double_array_map()));
269 __ mov(FieldOperand(eax, FixedDoubleArray::kLengthOffset), edi);
270 __ mov(esi, FieldOperand(edx, JSObject::kElementsOffset));
271 // Replace receiver's backing store with newly created FixedDoubleArray.
272 __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
// NOTE(review): RecordWriteField records ebx here — the instruction copying
// the new array (eax) into ebx appears to be elided from this extract;
// confirm against the full file before assuming ebx is stale.
274 __ RecordWriteField(edx, JSObject::kElementsOffset, ebx, edi, kDontSaveFPRegs,
275 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
277 __ mov(edi, FieldOperand(esi, FixedArray::kLengthOffset));
279 // Prepare for conversion loop.
280 ExternalReference canonical_the_hole_nan_reference =
281 ExternalReference::address_of_the_hole_nan();
284 // Call into runtime if GC is required.
285 __ bind(&gc_required);
286 // Restore registers before jumping into runtime.
287 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
292 // Convert and copy elements
293 // esi: source FixedArray
// times_2 on a smi-tagged index yields a *4 element scale (smis carry a
// factor of 2 in their tag encoding on 32-bit).
295 __ mov(ebx, FieldOperand(esi, edi, times_2, FixedArray::kHeaderSize));
296 // ebx: current element from source
297 // edi: index of current element
298 __ JumpIfNotSmi(ebx, &convert_hole);
300 // Normal smi, convert it to double and store.
// Load the integer from the stack into st(0), then store it out as a double.
303 __ fild_s(Operand(esp, 0));
305 __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
308 // Found hole, store hole_nan_as_double instead.
309 __ bind(&convert_hole);
// In debug builds, verify the non-smi really is the hole sentinel.
311 if (FLAG_debug_code) {
312 __ cmp(ebx, masm->isolate()->factory()->the_hole_value());
313 __ Assert(equal, kObjectFoundInSmiOnlyArray);
316 __ fld_d(Operand::StaticVariable(canonical_the_hole_nan_reference));
317 __ fstp_d(FieldOperand(eax, edi, times_4, FixedDoubleArray::kHeaderSize));
// Loop control: decrement the smi index; continue while it is non-negative.
320 __ sub(edi, Immediate(Smi::FromInt(1)));
321 __ j(not_sign, &loop);
// Restore the context register after the loop.
327 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
329 __ bind(&only_change_map);
332 // Set transitioned map.
333 __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
// In-place map change on an empty-elements array: no new object escapes,
// so the remembered set update can be omitted.
334 __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
335 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
// Converts a FixedDoubleArray into a FixedArray of heap objects: each double
// is boxed into a new HeapNumber, and hole NaNs become the-hole pointers.
// Installs the new FixedArray as the receiver's backing store and transitions
// the map. Fixed register assignment: receiver=edx, value=eax, target_map=ebx.
// NOTE(review): several original lines are elided in this extract (parameter
// list, loop entry/exit threading, and the register save/restore pairs).
339 void ElementsTransitionGenerator::GenerateDoubleToObject(
340 MacroAssembler* masm,
345 AllocationSiteMode mode,
347 // Return address is on the stack.
348 DCHECK(receiver.is(edx));
350 DCHECK(value.is(eax));
351 DCHECK(target_map.is(ebx));
353 Label loop, entry, convert_hole, gc_required, only_change_map, success;
355 if (mode == TRACK_ALLOCATION_SITE) {
356 __ JumpIfJSArrayHasAllocationMemento(edx, edi, fail);
359 // Check for empty arrays, which only require a map transition and no changes
360 // to the backing store.
361 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
362 __ cmp(edi, Immediate(masm->isolate()->factory()->empty_fixed_array()));
363 __ j(equal, &only_change_map);
369 __ mov(ebx, FieldOperand(edi, FixedDoubleArray::kLengthOffset));
371 // Allocate new FixedArray.
372 // ebx: length of source FixedDoubleArray (smi-tagged)
// Size = header + length * kPointerSize (times_2 on a smi gives *4).
373 __ lea(edi, Operand(ebx, times_2, FixedArray::kHeaderSize));
374 __ Allocate(edi, eax, esi, no_reg, &gc_required, TAG_OBJECT);
376 // eax: destination FixedArray
377 // ebx: number of elements
378 __ mov(FieldOperand(eax, HeapObject::kMapOffset),
379 Immediate(masm->isolate()->factory()->fixed_array_map()));
380 __ mov(FieldOperand(eax, FixedArray::kLengthOffset), ebx);
381 __ mov(edi, FieldOperand(edx, JSObject::kElementsOffset));
387 // Set transitioned map.
388 __ bind(&only_change_map);
389 __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
390 __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
391 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
394 // Call into runtime if GC is required.
395 __ bind(&gc_required);
// Restore the context register before bailing out to the runtime.
396 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
402 // Box doubles into heap numbers.
403 // edi: source FixedDoubleArray
404 // eax: destination FixedArray
406 // ebx: index of current element (smi-tagged)
// A hole is encoded as a NaN with a specific upper word; the offset skips
// the lower 32 bits so the compare hits the upper word directly.
407 uint32_t offset = FixedDoubleArray::kHeaderSize + sizeof(kHoleNanLower32);
408 __ cmp(FieldOperand(edi, ebx, times_4, offset), Immediate(kHoleNanUpper32));
409 __ j(equal, &convert_hole);
411 // Non-hole double, copy value into a heap number.
412 __ AllocateHeapNumber(edx, esi, no_reg, &gc_required);
413 // edx: new heap number
// Copy the double as two 32-bit words (low then high) into the HeapNumber.
414 __ mov(esi, FieldOperand(edi, ebx, times_4, FixedDoubleArray::kHeaderSize));
415 __ mov(FieldOperand(edx, HeapNumber::kValueOffset), esi);
416 __ mov(esi, FieldOperand(edi, ebx, times_4, offset));
417 __ mov(FieldOperand(edx, HeapNumber::kValueOffset + kPointerSize), esi);
418 __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize), edx);
// The stored HeapNumber may be in the new space; record the array store.
420 __ RecordWriteArray(eax, edx, esi, kDontSaveFPRegs, EMIT_REMEMBERED_SET,
422 __ jmp(&entry, Label::kNear);
424 // Replace the-hole NaN with the-hole pointer.
425 __ bind(&convert_hole);
// the_hole_value is an immortal immovable root, so no write barrier needed.
426 __ mov(FieldOperand(eax, ebx, times_2, FixedArray::kHeaderSize),
427 masm->isolate()->factory()->the_hole_value());
// Loop control: decrement the smi index; continue while non-negative.
430 __ sub(ebx, Immediate(Smi::FromInt(1)));
431 __ j(not_sign, &loop);
437 // Set transitioned map.
438 __ mov(FieldOperand(edx, HeapObject::kMapOffset), ebx);
439 __ RecordWriteField(edx, HeapObject::kMapOffset, ebx, edi, kDontSaveFPRegs,
440 OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
441 // Replace receiver's backing store with newly created and filled FixedArray.
442 __ mov(FieldOperand(edx, JSObject::kElementsOffset), eax);
443 __ RecordWriteField(edx, JSObject::kElementsOffset, eax, edi, kDontSaveFPRegs,
444 EMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
446 // Restore registers.
448 __ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
// Emits code that loads the character at `index` from `string` into `result`,
// handling sliced, cons, sequential, and external string representations.
// Falls back to `call_runtime` for cases that need flattening or short
// external strings.
// NOTE(review): some label declarations (e.g. cons_string, seq_string,
// one_byte) and intermediate index-scaling instructions are elided from this
// extract.
454 void StringCharLoadGenerator::Generate(MacroAssembler* masm,
459 Label* call_runtime) {
460 // Fetch the instance type of the receiver into result register.
461 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
462 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
464 // We need special handling for indirect strings.
465 Label check_sequential;
466 __ test(result, Immediate(kIsIndirectStringMask));
467 __ j(zero, &check_sequential, Label::kNear);
469 // Dispatch on the indirect string shape: slice or cons.
471 __ test(result, Immediate(kSlicedNotConsMask));
472 __ j(zero, &cons_string, Label::kNear);
// Handle sliced strings: add the slice offset to the index and continue
// with the parent string.
475 Label indirect_string_loaded;
476 __ mov(result, FieldOperand(string, SlicedString::kOffsetOffset));
478 __ add(index, result);
479 __ mov(string, FieldOperand(string, SlicedString::kParentOffset));
480 __ jmp(&indirect_string_loaded, Label::kNear);
482 // Handle cons strings.
483 // Check whether the right hand side is the empty string (i.e. if
484 // this is really a flat string in a cons string). If that is not
485 // the case we would rather go to the runtime system now to flatten
487 __ bind(&cons_string);
488 __ cmp(FieldOperand(string, ConsString::kSecondOffset),
489 Immediate(factory->empty_string()));
490 __ j(not_equal, call_runtime);
491 __ mov(string, FieldOperand(string, ConsString::kFirstOffset));
// Re-fetch the instance type of the (possibly replaced) underlying string.
493 __ bind(&indirect_string_loaded);
494 __ mov(result, FieldOperand(string, HeapObject::kMapOffset));
495 __ movzx_b(result, FieldOperand(result, Map::kInstanceTypeOffset));
497 // Distinguish sequential and external strings. Only these two string
498 // representations can reach here (slices and flat cons strings have been
499 // reduced to the underlying sequential or external string).
501 __ bind(&check_sequential);
502 STATIC_ASSERT(kSeqStringTag == 0);
503 __ test(result, Immediate(kStringRepresentationMask));
504 __ j(zero, &seq_string, Label::kNear);
506 // Handle external strings.
507 Label one_byte_external, done;
508 if (FLAG_debug_code) {
509 // Assert that we do not have a cons or slice (indirect strings) here.
510 // Sequential strings have already been ruled out.
511 __ test(result, Immediate(kIsIndirectStringMask));
512 __ Assert(zero, kExternalStringExpectedButNotFound);
514 // Rule out short external strings.
515 STATIC_ASSERT(kShortExternalStringTag != 0);
516 __ test_b(result, kShortExternalStringMask);
517 __ j(not_zero, call_runtime);
// Encoding test must come before the mov clobbers result; the flags from
// test_b survive the mov and feed the j(not_equal) below.
519 STATIC_ASSERT(kTwoByteStringTag == 0);
520 __ test_b(result, kStringEncodingMask);
521 __ mov(result, FieldOperand(string, ExternalString::kResourceDataOffset));
522 __ j(not_equal, &one_byte_external, Label::kNear);
// Two-byte external: load a 16-bit character.
524 __ movzx_w(result, Operand(result, index, times_2, 0));
525 __ jmp(&done, Label::kNear);
526 __ bind(&one_byte_external);
// One-byte external: load an 8-bit character.
528 __ movzx_b(result, Operand(result, index, times_1, 0));
529 __ jmp(&done, Label::kNear);
531 // Dispatch on the encoding: one-byte or two-byte.
533 __ bind(&seq_string);
534 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
535 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
536 __ test(result, Immediate(kStringEncodingMask));
537 __ j(not_zero, &one_byte, Label::kNear);
540 // Load the two-byte character code into the result register.
541 __ movzx_w(result, FieldOperand(string,
544 SeqTwoByteString::kHeaderSize));
545 __ jmp(&done, Label::kNear);
548 // Load the byte into the result register.
550 __ movzx_b(result, FieldOperand(string,
553 SeqOneByteString::kHeaderSize));
// Pre-assembles the canonical "young" code-age prologue sequence
// (push ebp; mov ebp, esp; push esi; push edi) into young_sequence_ so it
// can later be compared against and copied over patched (aged) prologues.
561 CodeAgingHelper::CodeAgingHelper() {
562 DCHECK(young_sequence_.length() == kNoCodeAgeSequenceLength);
563 CodePatcher patcher(young_sequence_.start(), young_sequence_.length());
564 patcher.masm()->push(ebp);
565 patcher.masm()->mov(ebp, esp);
566 patcher.masm()->push(esi);
567 patcher.masm()->push(edi);
// A sequence is "old" iff its first byte is a call opcode — i.e. the young
// prologue was patched into a call to a code-age stub (see
// PatchPlatformCodeAge below).
572 bool CodeAgingHelper::IsOld(byte* candidate) const {
573 return *candidate == kCallOpcode;
// True if `sequence` matches the canonical young prologue. In debug builds,
// anything not young must be recognizably old (a patched call).
578 bool Code::IsYoungSequence(Isolate* isolate, byte* sequence) {
579 bool result = isolate->code_aging_helper()->IsYoung(sequence);
580 DCHECK(result || isolate->code_aging_helper()->IsOld(sequence));
// Decodes the age/parity of a code object's prologue. Young sequences report
// kNoAgeCodeAge; aged sequences embed a call whose target stub encodes the
// age and marking parity.
// NOTE(review): the else branch bracketing between the two paths is elided
// in this extract.
585 void Code::GetCodeAgeAndParity(Isolate* isolate, byte* sequence, Age* age,
586 MarkingParity* parity) {
587 if (IsYoungSequence(isolate, sequence)) {
588 *age = kNoAgeCodeAge;
589 *parity = NO_MARKING_PARITY;
591 sequence++; // Skip the kCallOpcode byte
// Decode the call's rel32 operand into an absolute target address.
592 Address target_address = sequence + *reinterpret_cast<int*>(sequence) +
593 Assembler::kCallTargetAddressOffset;
594 Code* stub = GetCodeFromTargetAddress(target_address);
// The stub identity encodes age and parity; delegate to the overload.
595 GetCodeAgeAndParity(stub, age, parity);
// Rewrites a code object's prologue to reflect `age`: restoring the young
// prologue for kNoAgeCodeAge, otherwise overwriting it with a call to the
// matching code-age stub.
// NOTE(review): the else branch bracketing between the two paths is elided
// in this extract.
600 void Code::PatchPlatformCodeAge(Isolate* isolate,
603 MarkingParity parity) {
604 uint32_t young_length = isolate->code_aging_helper()->young_sequence_length();
605 if (age == kNoAgeCodeAge) {
606 isolate->code_aging_helper()->CopyYoungSequenceTo(sequence);
// The patched bytes are code; the icache must be flushed before execution.
607 CpuFeatures::FlushICache(sequence, young_length);
609 Code* stub = GetCodeAgeStub(isolate, age, parity);
610 CodePatcher patcher(sequence, young_length);
// NONE32 reloc: the call target is patched directly, no relocation recorded.
611 patcher.masm()->call(stub->instruction_start(), RelocInfo::NONE32);
616 } } // namespace v8::internal
618 #endif // V8_TARGET_ARCH_X87