1 // Copyright 2011 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_IA32_CODE_STUBS_IA32_H_
6 #define V8_IA32_CODE_STUBS_IA32_H_
8 #include "macro-assembler.h"
// Emits the native (assembly) fast path for Array construction; control
// presumably falls through to |call_generic_code| when the fast path cannot
// handle the arguments — TODO confirm against the definition.
// NOTE(review): parameter line(s) between these two are elided in this view.
15 void ArrayNativeCode(MacroAssembler* masm,
17 Label* call_generic_code);
// Code stub for store-buffer overflow handling. Its MinorKey encodes only
// whether FP (XMM) registers are preserved, so at most two distinct copies
// of this stub exist per isolate.
20 class StoreBufferOverflowStub: public PlatformCodeStub {
22 StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
23 : PlatformCodeStub(isolate), save_doubles_(save_fp) {
// Saving FP registers is only legal when SSE2 is known to be safe for the
// snapshot this stub may be baked into.
24 ASSERT(CpuFeatures::IsSafeForSnapshot(isolate, SSE2) ||
25 save_fp == kDontSaveFPRegs);
// Emits the stub's machine code.
28 void Generate(MacroAssembler* masm);
// Presumably pre-generates the fixed-register variants before
// serialization — TODO confirm at the definition.
30 static void GenerateFixedRegStubsAheadOfTime(Isolate* isolate);
// This stub never establishes a stack frame of its own.
31 virtual bool SometimesSetsUpAFrame() { return false; }
// Whether the generated code saves/restores double (XMM) registers.
34 SaveFPRegsMode save_doubles_;
36 Major MajorKey() { return StoreBufferOverflow; }
// Key space: 1 if FP registers are saved, 0 otherwise.
37 int MinorKey() { return (save_doubles_ == kSaveFPRegs) ? 1 : 0; }
// Static-only collection of string code-generation helpers.
41 class StringHelper : public AllStatic {
43 // Generate code for copying characters using the rep movs instruction.
44 // Copies ecx characters from esi to edi. Copying of overlapping regions is
// not supported.
46 static void GenerateCopyCharactersREP(MacroAssembler* masm,
47 Register dest, // Must be edi.
48 Register src, // Must be esi.
49 Register count, // Must be ecx.
50 Register scratch, // Neither of above.
53 // Generate string hash.
// Hash pipeline (by name): init with the first character, fold in further
// characters, then finalize — confirm exact contract at the definitions.
54 static void GenerateHashInit(MacroAssembler* masm,
58 static void GenerateHashAddCharacter(MacroAssembler* masm,
62 static void GenerateHashGetHash(MacroAssembler* masm,
// Static-only utility class; instances must never be created.
67 DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
// Stub implementing the SubString operation. It is stateless: MinorKey is
// always 0, so a single copy exists per isolate.
71 class SubStringStub: public PlatformCodeStub {
73 explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
76 Major MajorKey() { return SubString; }
77 int MinorKey() { return 0; }
// Emits the stub's machine code.
79 void Generate(MacroAssembler* masm);
// Stub implementing string comparison; stateless (MinorKey is always 0).
// Also exposes static helpers usable by other generated code.
83 class StringCompareStub: public PlatformCodeStub {
85 explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
87 // Compares two flat ASCII strings and returns result in eax.
88 static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
95 // Compares two flat ASCII strings for equality and returns result
// in eax.
97 static void GenerateFlatAsciiStringEquals(MacroAssembler* masm,
104 virtual Major MajorKey() { return StringCompare; }
105 virtual int MinorKey() { return 0; }
106 virtual void Generate(MacroAssembler* masm);
// Shared character-by-character compare loop used by the helpers above;
// jumps to |chars_not_equal| on the first mismatch. The distance hint
// defaults to kFar so the jump target may be far away.
108 static void GenerateAsciiCharsCompareLoop(
109 MacroAssembler* masm,
114 Label* chars_not_equal,
115 Label::Distance chars_not_equal_near = Label::kFar);
// Stub performing probed lookups in a NameDictionary. POSITIVE_LOOKUP finds
// an existing entry; NEGATIVE_LOOKUP proves a name's absence.
119 class NameDictionaryLookupStub: public PlatformCodeStub {
121 enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
123 NameDictionaryLookupStub(Isolate* isolate,
128 : PlatformCodeStub(isolate),
129 dictionary_(dictionary), result_(result), index_(index), mode_(mode) { }
131 void Generate(MacroAssembler* masm);
133 static void GenerateNegativeLookup(MacroAssembler* masm,
140 static void GeneratePositiveLookup(MacroAssembler* masm,
// This stub never establishes a stack frame of its own.
148 virtual bool SometimesSetsUpAFrame() { return false; }
// Probes emitted inline at the call site vs. total probes performed by
// the out-of-line stub.
151 static const int kInlinedProbes = 4;
152 static const int kTotalProbes = 20;
// Precomputed byte offsets into the NameDictionary backing store.
154 static const int kCapacityOffset =
155 NameDictionary::kHeaderSize +
156 NameDictionary::kCapacityIndex * kPointerSize;
158 static const int kElementsStartOffset =
159 NameDictionary::kHeaderSize +
160 NameDictionary::kElementsStartIndex * kPointerSize;
162 Major MajorKey() { return NameDictionaryLookup; }
// MinorKey packs three 3-bit register codes plus the 1-bit lookup mode,
// matching the BitField layout declared below (bits 0..9).
165 return DictionaryBits::encode(dictionary_.code()) |
166 ResultBits::encode(result_.code()) |
167 IndexBits::encode(index_.code()) |
168 LookupModeBits::encode(mode_);
171 class DictionaryBits: public BitField<int, 0, 3> {};
172 class ResultBits: public BitField<int, 3, 3> {};
173 class IndexBits: public BitField<int, 6, 3> {};
174 class LookupModeBits: public BitField<LookupMode, 9, 1> {};
176 Register dictionary_;
// Write-barrier stub. The generated code begins with a 2-byte instruction
// followed by a 5-byte instruction; GetMode/Patch below switch the stub
// between modes by rewriting those instructions between harmless compares
// (nop-like) and jumps, without regenerating the stub.
183 class RecordWriteStub: public PlatformCodeStub {
185 RecordWriteStub(Isolate* isolate,
189 RememberedSetAction remembered_set_action,
190 SaveFPRegsMode fp_mode)
191 : PlatformCodeStub(isolate),
195 remembered_set_action_(remembered_set_action),
196 save_fp_regs_mode_(fp_mode),
197 regs_(object, // An input reg.
198 address, // An input reg.
199 value) { // One scratch reg.
// Saving FP registers is only legal when SSE2 is snapshot-safe.
200 ASSERT(CpuFeatures::IsSafeForSnapshot(isolate, SSE2) ||
201 fp_mode == kDontSaveFPRegs);
207 INCREMENTAL_COMPACTION
// This stub never establishes a stack frame of its own.
210 virtual bool SometimesSetsUpAFrame() { return false; }
// Patchable opcodes: each "nop" is really a compare that is harmless when
// executed, so the stub runs correctly in every mode.
212 static const byte kTwoByteNopInstruction = 0x3c; // Cmpb al, #imm8.
213 static const byte kTwoByteJumpInstruction = 0xeb; // Jmp #imm8.
215 static const byte kFiveByteNopInstruction = 0x3d; // Cmpl eax, #imm32.
216 static const byte kFiveByteJumpInstruction = 0xe9; // Jmp #imm32.
// Decodes the stub's current mode by inspecting the opcode at offset 0
// (the 2-byte slot) and at offset 2 (the 5-byte slot).
218 static Mode GetMode(Code* stub) {
219 byte first_instruction = stub->instruction_start()[0];
220 byte second_instruction = stub->instruction_start()[2];
// A short jump in the first slot selects a mode whose return statement is
// elided from this view — presumably INCREMENTAL; TODO confirm.
222 if (first_instruction == kTwoByteJumpInstruction) {
226 ASSERT(first_instruction == kTwoByteNopInstruction);
228 if (second_instruction == kFiveByteJumpInstruction) {
229 return INCREMENTAL_COMPACTION;
232 ASSERT(second_instruction == kFiveByteNopInstruction);
234 return STORE_BUFFER_ONLY;
// Rewrites the two patchable slots to put the stub into |mode|. The
// ASSERTs document the legal mode transitions.
237 static void Patch(Code* stub, Mode mode) {
239 case STORE_BUFFER_ONLY:
240 ASSERT(GetMode(stub) == INCREMENTAL ||
241 GetMode(stub) == INCREMENTAL_COMPACTION);
242 stub->instruction_start()[0] = kTwoByteNopInstruction;
243 stub->instruction_start()[2] = kFiveByteNopInstruction;
246 ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
247 stub->instruction_start()[0] = kTwoByteJumpInstruction;
249 case INCREMENTAL_COMPACTION:
250 ASSERT(GetMode(stub) == STORE_BUFFER_ONLY);
251 stub->instruction_start()[0] = kTwoByteNopInstruction;
252 stub->instruction_start()[2] = kFiveByteJumpInstruction;
255 ASSERT(GetMode(stub) == mode);
// Flush the 7 patched bytes (2-byte + 5-byte instruction) from the
// instruction cache so the CPU sees the new opcodes.
256 CPU::FlushICache(stub->instruction_start(), 7);
260 // This is a helper class for freeing up 3 scratch registers, where the third
261 // is always ecx (needed for shift operations). The input is two registers
262 // that must be preserved and one scratch register provided by the caller.
263 class RegisterAllocation {
265 RegisterAllocation(Register object,
268 : object_orig_(object),
269 address_orig_(address),
270 scratch0_orig_(scratch0),
273 scratch0_(scratch0) {
274 ASSERT(!AreAliased(scratch0, object, address, no_reg));
// Pick a second scratch register, then remap any of the three working
// registers that happens to be ecx onto a free non-ecx register, so that
// ecx itself is always available as the third scratch.
275 scratch1_ = GetRegThatIsNotEcxOr(object_, address_, scratch0_);
276 if (scratch0.is(ecx)) {
277 scratch0_ = GetRegThatIsNotEcxOr(object_, address_, scratch1_);
279 if (object.is(ecx)) {
280 object_ = GetRegThatIsNotEcxOr(address_, scratch0_, scratch1_);
282 if (address.is(ecx)) {
283 address_ = GetRegThatIsNotEcxOr(object_, scratch0_, scratch1_);
285 ASSERT(!AreAliased(scratch0_, object_, address_, ecx));
// Pushes the registers we are about to clobber and moves the original
// object/address values into their (possibly remapped) working registers.
288 void Save(MacroAssembler* masm) {
289 ASSERT(!address_orig_.is(object_));
// At most one of object/address can have been remapped away from ecx.
290 ASSERT(object_.is(object_orig_) || address_.is(address_orig_));
291 ASSERT(!AreAliased(object_, address_, scratch1_, scratch0_));
292 ASSERT(!AreAliased(object_orig_, address_, scratch1_, scratch0_));
293 ASSERT(!AreAliased(object_, address_orig_, scratch1_, scratch0_));
294 // We don't have to save scratch0_orig_ because it was given to us as
295 // a scratch register. But if we had to switch to a different reg then
296 // we should save the new scratch0_.
297 if (!scratch0_.is(scratch0_orig_)) masm->push(scratch0_);
// If ecx was not one of the caller's registers it must be preserved too
// (the push is on a line elided from this view).
298 if (!ecx.is(scratch0_orig_) &&
299 !ecx.is(object_orig_) &&
300 !ecx.is(address_orig_)) {
303 masm->push(scratch1_);
304 if (!address_.is(address_orig_)) {
305 masm->push(address_);
306 masm->mov(address_, address_orig_);
308 if (!object_.is(object_orig_)) {
310 masm->mov(object_, object_orig_);
// Mirror image of Save(): moves values back to the original registers and
// pops everything Save() pushed, in reverse order.
314 void Restore(MacroAssembler* masm) {
315 // These will have been preserved the entire time, so we just need to move
316 // them back. Only in one case is the orig_ reg different from the plain
317 // one, since only one of them can alias with ecx.
318 if (!object_.is(object_orig_)) {
319 masm->mov(object_orig_, object_);
322 if (!address_.is(address_orig_)) {
323 masm->mov(address_orig_, address_);
326 masm->pop(scratch1_);
327 if (!ecx.is(scratch0_orig_) &&
328 !ecx.is(object_orig_) &&
329 !ecx.is(address_orig_)) {
332 if (!scratch0_.is(scratch0_orig_)) masm->pop(scratch0_);
335 // If we have to call into C then we need to save and restore all caller-
336 // saved registers that were not already preserved. The caller saved
337 // registers are eax, ecx and edx. The three scratch registers (incl. ecx)
338 // will be restored by other means so we don't bother pushing them here.
339 void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
340 if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->push(eax);
341 if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->push(edx);
342 if (mode == kSaveFPRegs) {
343 CpuFeatureScope scope(masm, SSE2);
// Reserve stack space for every XMM register except XMM0, then spill
// them (XMM0 is deliberately not preserved — see the loop bound).
345 Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
346 // Save all XMM registers except XMM0.
347 for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
348 XMMRegister reg = XMMRegister::from_code(i);
349 masm->movsd(Operand(esp, (i - 1) * kDoubleSize), reg);
// Reverse of SaveCallerSaveRegisters(): reload XMM1..N, release the stack
// space, then pop edx/eax if they were pushed.
354 inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
355 SaveFPRegsMode mode) {
356 if (mode == kSaveFPRegs) {
357 CpuFeatureScope scope(masm, SSE2);
358 // Restore all XMM registers except XMM0.
359 for (int i = XMMRegister::kNumRegisters - 1; i > 0; i--) {
360 XMMRegister reg = XMMRegister::from_code(i);
361 masm->movsd(reg, Operand(esp, (i - 1) * kDoubleSize));
364 Immediate(kDoubleSize * (XMMRegister::kNumRegisters - 1)));
366 if (!scratch0_.is(edx) && !scratch1_.is(edx)) masm->pop(edx);
367 if (!scratch0_.is(eax) && !scratch1_.is(eax)) masm->pop(eax);
// Accessors for the (possibly remapped) working registers.
370 inline Register object() { return object_; }
371 inline Register address() { return address_; }
372 inline Register scratch0() { return scratch0_; }
373 inline Register scratch1() { return scratch1_; }
// The caller's original register assignment, kept so Save()/Restore() can
// move values between the original and remapped registers.
376 Register object_orig_;
377 Register address_orig_;
378 Register scratch0_orig_;
383 // Third scratch register is always ecx.
// Scans the allocatable registers for one that is neither ecx nor any of
// r1/r2/r3 (the return on success is elided from this view).
385 Register GetRegThatIsNotEcxOr(Register r1,
388 for (int i = 0; i < Register::NumAllocatableRegisters(); i++) {
389 Register candidate = Register::FromAllocationIndex(i);
390 if (candidate.is(ecx)) continue;
391 if (candidate.is(r1)) continue;
392 if (candidate.is(r2)) continue;
393 if (candidate.is(r3)) continue;
399 friend class RecordWriteStub;
// What the generated code should do when the incremental marker does not
// need to be informed: just return, or still update the remembered set.
402 enum OnNoNeedToInformIncrementalMarker {
403 kReturnOnNoNeedToInformIncrementalMarker,
404 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
407 void Generate(MacroAssembler* masm);
408 void GenerateIncremental(MacroAssembler* masm, Mode mode);
409 void CheckNeedsToInformIncrementalMarker(
410 MacroAssembler* masm,
411 OnNoNeedToInformIncrementalMarker on_no_need,
413 void InformIncrementalMarker(MacroAssembler* masm);
415 Major MajorKey() { return RecordWrite; }
// MinorKey packs three 3-bit register codes plus the remembered-set action
// and FP-save mode bits, matching the BitField layout below (bits 0..10).
418 return ObjectBits::encode(object_.code()) |
419 ValueBits::encode(value_.code()) |
420 AddressBits::encode(address_.code()) |
421 RememberedSetActionBits::encode(remembered_set_action_) |
422 SaveFPRegsModeBits::encode(save_fp_regs_mode_);
// Registers the freshly generated stub with the incremental marker so it
// can be patched when marking starts/stops.
425 void Activate(Code* code) {
426 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
429 class ObjectBits: public BitField<int, 0, 3> {};
430 class ValueBits: public BitField<int, 3, 3> {};
431 class AddressBits: public BitField<int, 6, 3> {};
432 class RememberedSetActionBits: public BitField<RememberedSetAction, 9, 1> {};
433 class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 10, 1> {};
438 RememberedSetAction remembered_set_action_;
439 SaveFPRegsMode save_fp_regs_mode_;
440 RegisterAllocation regs_;
444 } } // namespace v8::internal
446 #endif // V8_IA32_CODE_STUBS_IA32_H_