1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
5 #ifndef V8_ARM_CODE_STUBS_ARM_H_
6 #define V8_ARM_CODE_STUBS_ARM_H_
8 #include "src/arm/frames-arm.h"
// Emits the native (assembler) fast path for the Array constructor; falls
// back to |call_generic_code| when the fast path cannot be taken
// (presumably on argument shapes it does not handle — see the .cc file).
14 void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
// Static-only collection of assembler helpers for string copying and
// comparison; never instantiated (AllStatic + no implicit constructors).
17 class StringHelper : public AllStatic {
19 // Generate code for copying a large number of characters. This function
20 // is allowed to spend extra time setting up conditions to make copying
21 // faster. Copying of overlapping regions is not supported.
22 // Dest register ends at the position after the last character written.
23 static void GenerateCopyCharacters(MacroAssembler* masm,
28 String::Encoding encoding);
30 // Compares two flat one-byte strings and returns result in r0.
31 static void GenerateCompareFlatOneByteStrings(
32 MacroAssembler* masm, Register left, Register right, Register scratch1,
33 Register scratch2, Register scratch3, Register scratch4);
35 // Compares two flat one-byte strings for equality and returns result in r0.
36 static void GenerateFlatOneByteStringEquals(MacroAssembler* masm,
37 Register left, Register right,
// Character-compare loop over |length| one-byte characters; branches to
// |chars_not_equal| on the first mismatch.  Presumably the shared tail of
// the two comparison helpers above — confirm in the .cc file.
43 static void GenerateOneByteCharsCompareLoop(
44 MacroAssembler* masm, Register left, Register right, Register length,
45 Register scratch1, Register scratch2, Label* chars_not_equal);
47 DISALLOW_IMPLICIT_CONSTRUCTORS(StringHelper);
// Stub that records a write of |value| into the field at |address| inside
// |object| for the GC (remembered set / incremental marking).  The first
// two instructions of the generated code act as a mode marker and are
// patched in place at runtime (see GetMode/Patch below) to enable or
// disable the incremental-marking paths without regenerating the stub.
51 class RecordWriteStub: public PlatformCodeStub {
53 RecordWriteStub(Isolate* isolate,
57 RememberedSetAction remembered_set_action,
58 SaveFPRegsMode fp_mode)
59 : PlatformCodeStub(isolate),
60 regs_(object, // An input reg.
61 address, // An input reg.
62 value) { // One scratch reg.
// Pack all configuration into minor_key_ so an identical stub can be
// reconstructed from the key alone (see the (key, isolate) ctor below).
63 minor_key_ = ObjectBits::encode(object.code()) |
64 ValueBits::encode(value.code()) |
65 AddressBits::encode(address.code()) |
66 RememberedSetActionBits::encode(remembered_set_action) |
67 SaveFPRegsModeBits::encode(fp_mode);
// Reconstructs a stub from a serialized minor key; the register
// accessors below decode the register assignments out of the key.
70 RecordWriteStub(uint32_t key, Isolate* isolate)
71 : PlatformCodeStub(key, isolate), regs_(object(), address(), value()) {}
76 INCREMENTAL_COMPACTION
79 bool SometimesSetsUpAFrame() override { return false; }
// Rewrites the branch instruction at |pos| into a TST-immediate, which
// serves as the "not patched" nop marker: clearing B27 and setting
// B24|B20 converts the branch encoding into the TST encoding (verified
// by the DCHECK).
81 static void PatchBranchIntoNop(MacroAssembler* masm, int pos) {
82 masm->instr_at_put(pos, (masm->instr_at(pos) & ~B27) | (B24 | B20));
83 DCHECK(Assembler::IsTstImmediate(masm->instr_at(pos)));
// Inverse of PatchBranchIntoNop: turns the TST marker back into a branch.
86 static void PatchNopIntoBranch(MacroAssembler* masm, int pos) {
87 masm->instr_at_put(pos, (masm->instr_at(pos) & ~(B24 | B20)) | B27);
88 DCHECK(Assembler::IsBranch(masm->instr_at(pos)));
// Decodes the stub's current mode by inspecting whether each of its
// first two instructions is a branch (patched) or a TST-immediate
// (unpatched).
91 static Mode GetMode(Code* stub) {
92 Instr first_instruction = Assembler::instr_at(stub->instruction_start());
93 Instr second_instruction = Assembler::instr_at(stub->instruction_start() +
94 Assembler::kInstrSize);
// First instruction patched to a branch => incremental marking enabled.
96 if (Assembler::IsBranch(first_instruction)) {
100 DCHECK(Assembler::IsTstImmediate(first_instruction));
// Only the second instruction patched => compaction variant.
102 if (Assembler::IsBranch(second_instruction)) {
103 return INCREMENTAL_COMPACTION;
106 DCHECK(Assembler::IsTstImmediate(second_instruction));
// Neither instruction patched: stub only updates the store buffer.
108 return STORE_BUFFER_ONLY;
// Patches the stub's first two instruction slots in place to put it into
// |mode|, then flushes the icache for the two modified instructions.
// The DCHECKs assert the expected mode transitions.
111 static void Patch(Code* stub, Mode mode) {
112 MacroAssembler masm(NULL,
113 stub->instruction_start(),
114 stub->instruction_size());
116 case STORE_BUFFER_ONLY:
117 DCHECK(GetMode(stub) == INCREMENTAL ||
118 GetMode(stub) == INCREMENTAL_COMPACTION);
119 PatchBranchIntoNop(&masm, 0);
120 PatchBranchIntoNop(&masm, Assembler::kInstrSize);
123 DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
124 PatchNopIntoBranch(&masm, 0);
126 case INCREMENTAL_COMPACTION:
127 DCHECK(GetMode(stub) == STORE_BUFFER_ONLY);
128 PatchNopIntoBranch(&masm, Assembler::kInstrSize);
131 DCHECK(GetMode(stub) == mode);
132 Assembler::FlushICache(stub->GetIsolate(), stub->instruction_start(),
133 2 * Assembler::kInstrSize);
136 DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
139 // This is a helper class for freeing up 3 scratch registers. The input is
140 // two registers that must be preserved and one scratch register provided by
142 class RegisterAllocation {
144 RegisterAllocation(Register object,
149 scratch0_(scratch0) {
150 DCHECK(!AreAliased(scratch0, object, address, no_reg));
// Pick a fourth register distinct from the three we were given.
151 scratch1_ = GetRegisterThatIsNotOneOf(object_, address_, scratch0_);
// Preserves scratch1_ (the register we picked ourselves) across the
// stub body; pair with Restore() below.
154 void Save(MacroAssembler* masm) {
155 DCHECK(!AreAliased(object_, address_, scratch1_, scratch0_));
156 // We don't have to save scratch0_ because it was given to us as
157 // a scratch register.
158 masm->push(scratch1_);
161 void Restore(MacroAssembler* masm) {
162 masm->pop(scratch1_);
165 // If we have to call into C then we need to save and restore all caller-
166 // saved registers that were not already preserved. The scratch registers
167 // will be restored by other means so we don't bother pushing them here.
168 void SaveCallerSaveRegisters(MacroAssembler* masm, SaveFPRegsMode mode) {
169 masm->stm(db_w, sp, (kCallerSaved | lr.bit()) & ~scratch1_.bit());
170 if (mode == kSaveFPRegs) {
171 masm->SaveFPRegs(sp, scratch0_);
// Mirror of SaveCallerSaveRegisters: FP registers first (they were
// pushed last), then the core caller-saved registers.
175 inline void RestoreCallerSaveRegisters(MacroAssembler*masm,
176 SaveFPRegsMode mode) {
177 if (mode == kSaveFPRegs) {
178 masm->RestoreFPRegs(sp, scratch0_);
180 masm->ldm(ia_w, sp, (kCallerSaved | lr.bit()) & ~scratch1_.bit());
183 inline Register object() { return object_; }
184 inline Register address() { return address_; }
185 inline Register scratch0() { return scratch0_; }
186 inline Register scratch1() { return scratch1_; }
194 friend class RecordWriteStub;
// Controls what the generated code does when the incremental marker does
// not need to be informed about the recorded write.
197 enum OnNoNeedToInformIncrementalMarker {
198 kReturnOnNoNeedToInformIncrementalMarker,
199 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker
202 inline Major MajorKey() const final { return RecordWrite; }
204 void Generate(MacroAssembler* masm) override;
205 void GenerateIncremental(MacroAssembler* masm, Mode mode);
206 void CheckNeedsToInformIncrementalMarker(
207 MacroAssembler* masm,
208 OnNoNeedToInformIncrementalMarker on_no_need,
210 void InformIncrementalMarker(MacroAssembler* masm);
// Registers the freshly generated stub code with the incremental marker
// so it can later be patched via Patch() above.
212 void Activate(Code* code) override {
213 code->GetHeap()->incremental_marking()->ActivateGeneratedStub(code);
// Accessors decoding the register assignments and flags out of
// minor_key_ (layout defined by the BitField classes below).
216 Register object() const {
217 return Register::from_code(ObjectBits::decode(minor_key_));
220 Register value() const {
221 return Register::from_code(ValueBits::decode(minor_key_));
224 Register address() const {
225 return Register::from_code(AddressBits::decode(minor_key_));
228 RememberedSetAction remembered_set_action() const {
229 return RememberedSetActionBits::decode(minor_key_);
232 SaveFPRegsMode save_fp_regs_mode() const {
233 return SaveFPRegsModeBits::decode(minor_key_);
// minor_key_ layout: three 4-bit register codes (bits 0-11) plus two
// single-bit flags (bits 12 and 13).
236 class ObjectBits: public BitField<int, 0, 4> {};
237 class ValueBits: public BitField<int, 4, 4> {};
238 class AddressBits: public BitField<int, 8, 4> {};
239 class RememberedSetActionBits: public BitField<RememberedSetAction, 12, 1> {};
240 class SaveFPRegsModeBits: public BitField<SaveFPRegsMode, 13, 1> {};
243 RegisterAllocation regs_;
245 DISALLOW_COPY_AND_ASSIGN(RecordWriteStub);
249 // Trampoline stub to call into native code. To call safely into native code
250 // in the presence of compacting GC (which can move code objects) we need to
251 // keep the code which called into native pinned in the memory. Currently the
252 // simplest approach is to generate such stub early enough so it can never be
254 class DirectCEntryStub: public PlatformCodeStub {
256 explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
// Emits code that calls through this trampoline to |target|.
257 void GenerateCall(MacroAssembler* masm, Register target);
// The trampoline must stay pinned: its code may never be moved by the GC.
260 bool NeedsImmovableCode() override { return true; }
262 DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
263 DEFINE_PLATFORM_CODE_STUB(DirectCEntry, PlatformCodeStub);
// Stub that probes a NameDictionary for a key, in one of two modes
// (POSITIVE_LOOKUP / NEGATIVE_LOOKUP); the mode is stored in minor_key_.
267 class NameDictionaryLookupStub: public PlatformCodeStub {
269 enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
271 NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
272 : PlatformCodeStub(isolate) {
273 minor_key_ = LookupModeBits::encode(mode);
276 static void GenerateNegativeLookup(MacroAssembler* masm,
284 static void GeneratePositiveLookup(MacroAssembler* masm,
292 bool SometimesSetsUpAFrame() override { return false; }
// Probe counts: kInlinedProbes is presumably the number of probes
// emitted inline before falling back to the full stub, kTotalProbes the
// stub's overall probe budget — confirm in the .cc file.
295 static const int kInlinedProbes = 4;
296 static const int kTotalProbes = 20;
// Byte offsets (relative to the dictionary object) of the capacity
// field and of the first element, derived from the NameDictionary
// layout constants.
298 static const int kCapacityOffset =
299 NameDictionary::kHeaderSize +
300 NameDictionary::kCapacityIndex * kPointerSize;
302 static const int kElementsStartOffset =
303 NameDictionary::kHeaderSize +
304 NameDictionary::kElementsStartIndex * kPointerSize;
306 LookupMode mode() const { return LookupModeBits::decode(minor_key_); }
// Single-bit encoding of LookupMode inside minor_key_.
308 class LookupModeBits: public BitField<LookupMode, 0, 1> {};
310 DEFINE_NULL_CALL_INTERFACE_DESCRIPTOR();
311 DEFINE_PLATFORM_CODE_STUB(NameDictionaryLookup, PlatformCodeStub);
314 } // namespace internal
317 #endif // V8_ARM_CODE_STUBS_ARM_H_