1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
4 // VirtualCallStubCpu.hpp
6 #ifndef _VIRTUAL_CALL_STUB_RISCV64_H
7 #define _VIRTUAL_CALL_STUB_RISCV64_H
9 #define DISPATCH_STUB_FIRST_DWORD 0x00000e97
10 #define RESOLVE_STUB_FIRST_DWORD 0x00053e03
11 #define VTABLECALL_STUB_FIRST_DWORD 0x00053e83
13 #define LOOKUP_STUB_FIRST_DWORD 0x00000f97
15 #define USES_LOOKUP_STUBS 1
// LookupStub accessors and data (this listing omits the surrounding struct
// declaration and the _entryPoint/_token declarations).
// Address of the stub's first emitted instruction.
19 inline PCODE entryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_entryPoint[0]; }
// The dispatch token this stub was created for (the emitted code loads it
// into t2 before tail-calling the resolve worker -- see LookupHolder::Initialize).
20 inline size_t token() { LIMITED_METHOD_CONTRACT; return _token; }
// Total size of the stub: code plus trailing data.
21 inline size_t size() { LIMITED_METHOD_CONTRACT; return sizeof(LookupStub); }
23 friend struct LookupHolder;
// Target the stub tail-calls (the resolve worker); read PC-relative by the
// emitted code at byte offset 16 from the entry point.
26 PCODE _resolveWorkerTarget;
// LookupHolder: container that owns a LookupStub and fills in its code + data.
35 static void InitializeStatic() { }
// Emits the 4-instruction lookup stub:
//   auipc t6, 0       ; t6 = &_entryPoint[0]
//   ld    t2, 24(t6)  ; t2 = _token              (byte offset 24)
//   ld    t6, 16(t6)  ; t6 = _resolveWorkerTarget (byte offset 16)
//   jalr  x0, t6, 0   ; tail-call the resolve worker
37 void Initialize(LookupHolder* pLookupHolderRX, PCODE resolveWorkerTarget, size_t dispatchToken)
// NOTE(review): the two comment lines below name `ra` as the base register,
// but the encodings emitted further down use t6 (auipc t6, 0). The offsets
// (12+12)=24 and (4+12)=16 do agree with the encodings -- confirm and fix
// the register name.
40 // ld t2, (12 + 12)(ra)
41 // ld t6, (4 + 12)(ra)
44 // _resolveWorkerTarget
47 _stub._entryPoint[0] = LOOKUP_STUB_FIRST_DWORD; // auipc t6, 0 //0x00000f97
48 _stub._entryPoint[1] = 0x018fb383; //ld t2, 24(t6)
49 _stub._entryPoint[2] = 0x010fbf83; //ld t6, 16(t6)
50 _stub._entryPoint[3] = 0x000f8067; //jalr x0, t6, 0
// Stub data, read PC-relative by the instructions above.
52 _stub._resolveWorkerTarget = resolveWorkerTarget;
53 _stub._token = dispatchToken;
// Accessor for the contained stub.
56 LookupStub* stub() { LIMITED_METHOD_CONTRACT; return &_stub; }
// Recovers the LookupHolder from a stub entry-point address by subtracting
// the entry point's offset within the holder.
57 static LookupHolder* FromLookupEntry(PCODE lookupEntry)
59 return (LookupHolder*) ( lookupEntry - offsetof(LookupHolder, _stub) - offsetof(LookupStub, _entryPoint) );
// DispatchStub accessors (the enclosing struct declaration and field
// declarations are omitted from this listing).
// Address of the stub's first emitted instruction.
65 inline PCODE entryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_entryPoint[0]; }
// The MethodTable this stub predicts; the emitted code compares it against
// the object's MethodTable.
67 inline size_t expectedMT() { LIMITED_METHOD_CONTRACT; return _expectedMT; }
// Where the stub jumps when the MethodTable check succeeds.
68 inline PCODE implTarget() { LIMITED_METHOD_CONTRACT; return _implTarget; }
// Address of the _implTarget slot for backpatching. The slot lives inside
// the executable stub itself, hence SlotType_Executable.
70 inline TADDR implTargetSlot(EntryPointSlots::SlotType *slotTypeRef) const
72 LIMITED_METHOD_CONTRACT;
73 _ASSERTE(slotTypeRef != nullptr);
75 *slotTypeRef = EntryPointSlots::SlotType_Executable;
76 return (TADDR)&_implTarget;
// Where the stub jumps when the MethodTable check fails (a ResolveStub's
// fail entry point).
79 inline PCODE failTarget() { LIMITED_METHOD_CONTRACT; return _failTarget; }
// Total size of the stub: code plus trailing data.
80 inline size_t size() { LIMITED_METHOD_CONTRACT; return sizeof(DispatchStub); }
83 friend struct DispatchHolder;
// DispatchHolder: container that owns a DispatchStub and fills in its code + data.
93 static void InitializeStatic()
95 LIMITED_METHOD_CONTRACT;
97 // Check that _implTarget is aligned in the DispatchHolder for backpatching
98 static_assert_no_msg(((offsetof(DispatchHolder, _stub) + offsetof(DispatchStub, _implTarget)) % sizeof(void *)) == 0);
// Emits the 8-instruction dispatch stub. The code is 8*4 = 32 bytes, so the
// data fields sit at byte offsets 32 (_expectedMT), 40 (_implTarget) and
// 48 (_failTarget) from the entry point -- matching the ld immediates below.
101 void Initialize(DispatchHolder* pDispatchHolderRX, PCODE implTarget, PCODE failTarget, size_t expectedMT)
104 // ld t0, 0(a0) // methodTable from object in $a0
105 // ld t6, 32(t4) // t6 _expectedMT
106 // bne t6, t0, failLabel
107 // ld t4, 40(t4) // t4 _implTarget
110 // ld t4, 48(t4) // t4 _failTarget
// auipc materializes the stub's own address in t4 so the trailing data can
// be read PC-relative.
118 _stub._entryPoint[0] = DISPATCH_STUB_FIRST_DWORD; // auipc t4,0 // 0x00000e97
119 _stub._entryPoint[1] = 0x00053283; // ld t0, 0(a0) // methodTable from object in $a0
120 _stub._entryPoint[2] = 0x020ebf83; // ld t6, 32(t4) // t6 _expectedMT
// B-type offset +12: skips forward to _entryPoint[6], the fail path.
121 _stub._entryPoint[3] = 0x005f9663; // bne t6, t0, failLabel
122 _stub._entryPoint[4] = 0x028ebe83; // ld t4, 40(t4) // t4 _implTarget
123 _stub._entryPoint[5] = 0x000e8067; // jalr x0, t4, 0
124 _stub._entryPoint[6] = 0x030ebe83; // ld t4, 48(t4) // t4 _failTarget
125 _stub._entryPoint[7] = 0x000e8067; // jalr x0, t4, 0
// Stub data, read PC-relative by the instructions above.
127 _stub._expectedMT = expectedMT;
128 _stub._implTarget = implTarget;
129 _stub._failTarget = failTarget;
// Accessor for the contained stub.
132 DispatchStub* stub() { LIMITED_METHOD_CONTRACT; return &_stub; }
// Recovers the DispatchHolder from a stub entry-point address.
134 static DispatchHolder* FromDispatchEntry(PCODE dispatchEntry)
136 LIMITED_METHOD_CONTRACT;
137 DispatchHolder* dispatchHolder = (DispatchHolder*) ( dispatchEntry - offsetof(DispatchHolder, _stub) - offsetof(DispatchStub, _entryPoint) );
138 return dispatchHolder;
// ResolveStub accessors and layout. The stub has three entry points:
//   resolveEntryPoint -- hashed cache lookup, called directly by jitted code
//   slowEntryPoint    -- tail-calls the resolve worker on a cache miss
//   failEntryPoint    -- entered from a DispatchStub whose MT check failed
147 inline PCODE failEntryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_failEntryPoint[0]; }
148 inline PCODE resolveEntryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_resolveEntryPoint[0]; }
149 inline PCODE slowEntryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_slowEntryPoint[0]; }
150 inline size_t token() { LIMITED_METHOD_CONTRACT; return _token; }
151 inline INT32* pCounter() { LIMITED_METHOD_CONTRACT; return _pCounter; }
// _hashedToken is stored pre-shifted left by LOG2_PTRSIZE (see
// ResolveHolder::Initialize); this accessor undoes the shift.
153 inline UINT32 hashedToken() { LIMITED_METHOD_CONTRACT; return _hashedToken >> LOG2_PTRSIZE; }
154 inline size_t cacheAddress() { LIMITED_METHOD_CONTRACT; return _cacheAddress; }
155 inline size_t size() { LIMITED_METHOD_CONTRACT; return sizeof(ResolveStub); }
158 friend struct ResolveHolder;
// Instruction counts of the three code regions (20 + 4 + 9 = 33 DWORDs).
// The data fields (some declarations are omitted from this listing) start
// immediately after the code; the PC-relative ld/lw immediates emitted in
// ResolveHolder::Initialize -- and its _ASSERTEs on offsetof() -- rely on
// this exact layout.
159 const static int resolveEntryPointLen = 20;
160 const static int slowEntryPointLen = 4;
161 const static int failEntryPointLen = 9;
163 DWORD _resolveEntryPoint[resolveEntryPointLen];
164 DWORD _slowEntryPoint[slowEntryPointLen];
165 DWORD _failEntryPoint[failEntryPointLen];
167 INT32* _pCounter; //Base of the Data Region
168 size_t _cacheAddress; // lookupCache
170 PCODE _resolveWorkerTarget;
// ResolveHolder: nothing to set up statically.
175 static void InitializeStatic() { }
177 void Initialize(ResolveHolder* pResolveHolderRX,
178 PCODE resolveWorkerTarget, PCODE patcherTarget,
179 size_t dispatchToken, UINT32 hashedToken,
180 void * cacheAddr, INT32 * counterAddr)
185 /******** Rough Convention of used in this routine
186 ;;ra temp base address of loading data region
187 ;;t5 indirection cell
188 ;;t3 MethodTable (from object ref in a0), out: this._token
193 ;;cachemask => [CALL_STUB_CACHE_MASK * sizeof(void*)]
195 // Called directly by JITTED code
196 // ResolveStub._resolveEntryPoint(a0:Object*, a1 ...,a7, t8:IndirectionCellAndFlags)
198 // MethodTable mt = a0.m_pMethTab;
199 // int i = ((mt + mt >> 12) ^ this._hashedToken) & _cacheMask
200 // ResolveCacheElem e = this._cacheAddress + i
201 // t1 = e = this._cacheAddress + i
202 // if (mt == e.pMT && this._token == e.token)
204 // (e.target)(a0, [a1,...,a7]);
209 // (this._slowEntryPoint)(a0, [a1,.., a7], t5, t3);
214 ///;;resolveEntryPoint
215 // Called directly by JITTED code
216 // ResolveStub._resolveEntryPoint(a0:Object*, a1 ...,a7, t5:IndirectionCellAndFlags)
219 _stub._resolveEntryPoint[n++] = RESOLVE_STUB_FIRST_DWORD;
221 _stub._resolveEntryPoint[n++] = 0x00ce5293;
223 _stub._resolveEntryPoint[n++] = 0x005e0333;
225 _stub._resolveEntryPoint[n++] = 0x00000297;
227 _stub._resolveEntryPoint[n++] = 0xff428293;
229 // lw t6, 0(t0) #t6 = this._hashedToken
230 _stub._resolveEntryPoint[n++] = 0x0002af83 | (33 << 22); //(20+4+9)*4<<20;
231 _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen) == 33);
232 _ASSERTE((33<<2) == (offsetof(ResolveStub, _hashedToken) -offsetof(ResolveStub, _resolveEntryPoint[0])));
235 _stub._resolveEntryPoint[n++] = 0x01f34333;
237 _ASSERTE(CALL_STUB_CACHE_MASK * sizeof(void*) == 0x7ff8);
239 _stub._resolveEntryPoint[n++] = 0x07ff8fb7;
241 _stub._resolveEntryPoint[n++] = 0x00cfdf9b;
243 _stub._resolveEntryPoint[n++] = 0x01f37333;
244 // ld t6, 0(t0) # t6 = this._cacheAddress
245 _stub._resolveEntryPoint[n++] = 0x0002bf83 | (36 << 22); //(20+4+9+1+2)*4<<20;
246 _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+2) == 36);
247 _ASSERTE((36<<2) == (offsetof(ResolveStub, _cacheAddress) -offsetof(ResolveStub, _resolveEntryPoint[0])));
249 _stub._resolveEntryPoint[n++] = 0x006f8333;
250 // ld t1, 0(t1) # t1 = e = this._cacheAddress[i]
251 _stub._resolveEntryPoint[n++] = 0x00033303;
253 // ld t6, 0(t1) # t6 = Check mt == e.pMT;
254 _stub._resolveEntryPoint[n++] = 0x00033f83 | ((offsetof(ResolveCacheElem, pMT) & 0xfff) << 20);
255 // ld t2, 0(t0) # $t2 = this._token
256 _stub._resolveEntryPoint[n++] = 0x0002b383 | (38<<22);//(20+4+9+1+2+2)*4<<20;
257 _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+4) == 38);
258 _ASSERTE((38<<2) == (offsetof(ResolveStub, _token) -offsetof(ResolveStub, _resolveEntryPoint[0])));
261 _stub._resolveEntryPoint[n++] = 0x01cf9a63;// | PC_REL_OFFSET(_slowEntryPoint[0], n);
263 // ld t6, 0(t1) # t6 = e.token;
264 _stub._resolveEntryPoint[n++] = 0x00033f83 | ((offsetof(ResolveCacheElem, token) & 0xfff)<<10);
266 _stub._resolveEntryPoint[n++] = 0x007f9663;// | PC_REL_OFFSET(_slowEntryPoint[0], n);
268 pc_offset = offsetof(ResolveCacheElem, target) & 0xffffffff;
269 _ASSERTE(pc_offset >=0 && pc_offset%8 == 0);
270 // ld t3, 0(t1) # t3 = e.target;
271 _stub._resolveEntryPoint[n++] = 0x00033e03 | ((offsetof(ResolveCacheElem, target) & 0xfff)<<10);
273 _stub._resolveEntryPoint[n++] = 0x000e0067;
275 _ASSERTE(n == ResolveStub::resolveEntryPointLen);
276 _ASSERTE(_stub._resolveEntryPoint + n == _stub._slowEntryPoint);
278 // ResolveStub._slowEntryPoint(a0:MethodToken, [a1..a7], t5:IndirectionCellAndFlags)
281 // this._resolveWorkerTarget(a0, [a1..a7], t5, t2);
283 //#undef PC_REL_OFFSET
284 //#define PC_REL_OFFSET(_member, _index) (((INT32)(offsetof(ResolveStub, _member) - (offsetof(ResolveStub, _slowEntryPoint[_index])))) & 0xffff)
286 // ;;fall through to the slow case
289 _stub._slowEntryPoint[0] = 0x00000297;
290 // ld t6, 0(t0) # r21 = _resolveWorkerTarget;
291 _ASSERTE((0x14*4) == ((INT32)(offsetof(ResolveStub, _resolveWorkerTarget) - (offsetof(ResolveStub, _slowEntryPoint[0])))));
292 _ASSERTE((ResolveStub::slowEntryPointLen + ResolveStub::failEntryPointLen+1+3*2) == 0x14);
293 _stub._slowEntryPoint[1] = 0x0002bf83 | ((0x14 * 4) << 20);
295 // ld t2, 0(t0) # t2 = this._token;
296 _stub._slowEntryPoint[2] = 0x0002b383 | ((0x12 * 4) << 20); //(18*4=72=0x48)<<20
297 _ASSERTE((ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+4)*4 == (0x12 * 4));
298 _ASSERTE((0x12 * 4) == (offsetof(ResolveStub, _token) -offsetof(ResolveStub, _slowEntryPoint[0])));
301 _stub._slowEntryPoint[3] = 0x000f8067;
303 _ASSERTE(4 == ResolveStub::slowEntryPointLen);
305 // ResolveStub._failEntryPoint(a0:MethodToken, a1,.., a7, t5:IndirectionCellAndFlags)
307 // if(--*(this._pCounter) < 0) t5 = t5 | SDF_ResolveBackPatch;
308 // this._resolveEntryPoint(a0, [a1..a7]);
310 //#undef PC_REL_OFFSET
311 //#define PC_REL_OFFSET(_member, _index) (((INT32)(offsetof(ResolveStub, _member) - (offsetof(ResolveStub, _failEntryPoint[_index])))) & 0xffff)
315 _stub._failEntryPoint[0] = 0x00000297;
316 // ld t1, 0(t0) # t1 = _pCounter; 0x2800000=((failEntryPointLen+1)*4)<<20.
317 _stub._failEntryPoint[1] = 0x0002b303 | 0x2800000;
318 _ASSERTE((((ResolveStub::failEntryPointLen+1)*4)<<20) == 0x2800000);
319 _ASSERTE((0x2800000>>20) == ((INT32)(offsetof(ResolveStub, _pCounter) - (offsetof(ResolveStub, _failEntryPoint[0])))));
321 _stub._failEntryPoint[2] = 0x00032f83;
323 _stub._failEntryPoint[3] = 0xffff8f93;
326 _stub._failEntryPoint[4] = 0x01f32023;
328 _ASSERTE(SDF_ResolveBackPatch == 0x1);
329 // ;; ori t5, t5, t6 >=0 ? SDF_ResolveBackPatch:0;
331 _stub._failEntryPoint[5] = 0x000faf93;
333 _stub._failEntryPoint[6] = 0x001fcf93;
335 _stub._failEntryPoint[7] = 0x01ff6f33;
337 // j _resolveEntryPoint // pc - 128 = pc + 4 - resolveEntryPointLen * 4 - slowEntryPointLen * 4 - failEntryPointLen * 4;
338 _stub._failEntryPoint[8] = 0xf81ff06f;
340 _ASSERTE(9 == ResolveStub::failEntryPointLen);
341 _stub._pCounter = counterAddr;
342 _stub._hashedToken = hashedToken << LOG2_PTRSIZE;
343 _stub._cacheAddress = (size_t) cacheAddr;
344 _stub._token = dispatchToken;
345 _stub._resolveWorkerTarget = resolveWorkerTarget;
347 _ASSERTE(resolveWorkerTarget == (PCODE)ResolveWorkerChainLookupAsmStub);
348 _ASSERTE(patcherTarget == NULL);
352 #undef Dataregionbase
// Accessor for the contained stub.
355 ResolveStub* stub() { LIMITED_METHOD_CONTRACT; return &_stub; }
// Recover a ResolveHolder from one of its stub's entry-point addresses;
// defined out-of-line below (non-DAC builds only).
357 static ResolveHolder* FromFailEntry(PCODE failEntry);
358 static ResolveHolder* FromResolveEntry(PCODE resolveEntry);
364 /*VTableCallStub**************************************************************************************
365 These are jump stubs that perform a vtable-base virtual call. These stubs assume that an object is placed
366 in the first argument register (this pointer). From there, the stub extracts the MethodTable pointer, followed by the
367 vtable pointer, and finally jumps to the target method at a given slot in the vtable.
369 struct VTableCallStub
371 friend struct VTableCallHolder;
// NOTE(review): the assertion below appears to belong to a member whose
// signature line is not visible in this listing (presumably a not-yet-
// implemented accessor) -- confirm against the full file.
375 _ASSERTE(!"RISCV64:NYI");
// Address of the stub's first instruction; the stub starts at 'this'.
379 inline PCODE entryPoint() const { LIMITED_METHOD_CONTRACT; return (PCODE)&_entryPoint[0]; }
// Rebuilds the dispatch token from the slot number that
// VTableCallHolder::Initialize stores in the last 4 bytes of the stub.
381 inline size_t token()
383 LIMITED_METHOD_CONTRACT;
384 DWORD slot = *(DWORD*)(reinterpret_cast<BYTE*>(this) + size() - 4);
385 return DispatchToken::CreateDispatchToken(slot).To_SIZE_T();
389 BYTE _entryPoint[0]; // Dynamically sized stub. See Initialize() for more details.
392 /* VTableCallHolders are the containers for VTableCallStubs, they provide for any alignment of
393 stubs as necessary. */
394 struct VTableCallHolder
396 void Initialize(unsigned slot);
// The stub occupies the holder itself, so stub() is just a reinterpretation
// of 'this'.
398 VTableCallStub* stub() { LIMITED_METHOD_CONTRACT; return reinterpret_cast<VTableCallStub *>(this); }
// Computes the allocation size for the stub of a given slot:
//   12 bytes = first ld + final jalr + trailing slot DWORD;
//   each of the two vtable indirections costs 4 bytes (one ld) when its
//   offset fits a 12-bit immediate (< 0x1000), else 12 bytes (lwu/add/ld)
//   plus a 4-byte data word; the extra +4 when any data word is present
//   pays for the auipc that establishes the data base register.
400 static size_t GetHolderSize(unsigned slot)
402 STATIC_CONTRACT_WRAPPER;
403 unsigned offsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(slot) * TARGET_POINTER_SIZE;
404 unsigned offsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(slot) * TARGET_POINTER_SIZE;
405 int indirectionsCodeSize = (offsetOfIndirection >= 0x1000 ? 12 : 4) + (offsetAfterIndirection >= 0x1000 ? 12 : 4);
406 int indirectionsDataSize = (offsetOfIndirection >= 0x1000 ? 4 : 0) + (offsetAfterIndirection >= 0x1000 ? 4 : 0);
407 return 12 + indirectionsCodeSize + ((indirectionsDataSize > 0) ? (indirectionsDataSize + 4) : 0);
// The stub's entry point is the holder address itself.
410 static VTableCallHolder* FromVTableCallEntry(PCODE entry) { LIMITED_METHOD_CONTRACT; return (VTableCallHolder*)entry; }
413 // VTableCallStub follows here. It is dynamically sized on allocation because it could
// NOTE(review): "LDR" below is an ARM mnemonic; on RISCV64 the variable-size
// choice is between a short `ld` and an auipc/lwu/add/ld sequence.
414 // use short/long instruction sizes for LDR, depending on the slot value.
420 #ifndef DACCESS_COMPILE
// Recovers the ResolveHolder from a fail-entry address by subtracting the
// fail entry point's offset within the holder.
421 ResolveHolder* ResolveHolder::FromFailEntry(PCODE failEntry)
423 LIMITED_METHOD_CONTRACT;
424 ResolveHolder* resolveHolder = (ResolveHolder*) ( failEntry - offsetof(ResolveHolder, _stub) - offsetof(ResolveStub, _failEntryPoint) );
425 return resolveHolder;
// Same recovery, starting from the resolve entry point.
428 ResolveHolder* ResolveHolder::FromResolveEntry(PCODE resolveEntry)
430 LIMITED_METHOD_CONTRACT;
431 ResolveHolder* resolveHolder = (ResolveHolder*) ( resolveEntry - offsetof(ResolveHolder, _stub) - offsetof(ResolveStub, _resolveEntryPoint) );
432 return resolveHolder;
435 void VTableCallHolder::Initialize(unsigned slot)
437 unsigned offsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(slot) * TARGET_POINTER_SIZE;
438 unsigned offsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(slot) * TARGET_POINTER_SIZE;
440 VTableCallStub* pStub = stub();
441 BYTE* p = (BYTE*)pStub->entryPoint();
443 // ld t4, 0(a0) : t4 = MethodTable pointer
444 *(UINT32*)p = 0x00053e83; // VTABLECALL_STUB_FIRST_DWORD
447 if ((offsetOfIndirection >= 0x1000) || (offsetAfterIndirection >= 0x1000))
449 *(UINT32*)p = 0x00000317; // auipc t1, 0
453 if (offsetOfIndirection >= 0x1000)
455 uint dataOffset = 20 + (offsetAfterIndirection >= 0x1000 ? 12 : 4);
457 // lwu t3,dataOffset(t1)
458 *(DWORD*)p = 0x00036e03 | ((UINT32)dataOffset << 20); p += 4;
460 *(DWORD*)p = 0x01ce8eb3; p += 4;
462 *(DWORD*)p = 0x000ebe83; p += 4;
466 // ld t4, offsetOfIndirection(t4)
467 *(DWORD*)p = 0x000ebe83 | ((UINT32)offsetOfIndirection << 20); p += 4;
470 if (offsetAfterIndirection >= 0x1000)
472 uint indirectionsCodeSize = (offsetOfIndirection >= 0x1000 ? 12 : 4);
473 uint indirectionsDataSize = (offsetOfIndirection >= 0x1000 ? 4 : 0);
474 uint dataOffset = 20 + indirectionsCodeSize + indirectionsDataSize;
476 // ldw t3,dataOffset(t1)
477 *(DWORD*)p = 0x00036e03 | ((UINT32)dataOffset << 20); p += 4;
479 *(DWORD*)p = 0x01ce8eb3; p += 4;
481 *(DWORD*)p = 0x000ebe83; p += 4;
485 // ld t4, offsetAfterIndirection(t4)
486 *(DWORD*)p = 0x000ebe83 | ((UINT32)offsetAfterIndirection << 20); p += 4;
490 *(UINT32*)p = 0x000e8067; p += 4;
493 if (offsetOfIndirection >= 0x1000)
495 *(UINT32*)p = (UINT32)offsetOfIndirection;
498 if (offsetAfterIndirection >= 0x1000)
500 *(UINT32*)p = (UINT32)offsetAfterIndirection;
504 // Store the slot value here for convenience. Not a real instruction (unreachable anyways)
505 // NOTE: Not counted in codeSize above.
506 *(UINT32*)p = slot; p += 4;
508 _ASSERT(p == (BYTE*)stub()->entryPoint() + VTableCallHolder::GetHolderSize(slot));
509 _ASSERT(stub()->size() == VTableCallHolder::GetHolderSize(slot));
512 #endif // DACCESS_COMPILE
514 #endif //DECLARE_DATA
516 #endif // _VIRTUAL_CALL_STUB_RISCV64_H