0491dd880815b3293f420a76f97c24404683a587
[platform/upstream/dotnet/runtime.git] / src / coreclr / vm / riscv64 / virtualcallstubcpu.hpp
1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
3 //
4 // VirtualCallStubCpu.hpp
5 //
6 #ifndef _VIRTUAL_CALL_STUB_RISCV64_H
7 #define _VIRTUAL_CALL_STUB_RISCV64_H
8
9 #define DISPATCH_STUB_FIRST_DWORD 0x00000e97
10 #define RESOLVE_STUB_FIRST_DWORD 0x00053e03
11 #define VTABLECALL_STUB_FIRST_DWORD 0x00053e83
12
13 #define LOOKUP_STUB_FIRST_DWORD 0x00000f97
14
15 #define USES_LOOKUP_STUBS   1
16
struct LookupStub
{
    // Lookup stubs are the initial stubs assigned to an indirection cell: they
    // simply pass the dispatch token to the resolve worker. Code and data live
    // together; the code loads the data slots PC-relative, so the field layout
    // below is part of the stub's "ABI" (see LookupHolder::Initialize).
    inline PCODE entryPoint() { LIMITED_METHOD_CONTRACT; return (PCODE)&_entryPoint[0]; }
    inline size_t token() { LIMITED_METHOD_CONTRACT; return _token; }
    inline size_t size() { LIMITED_METHOD_CONTRACT; return sizeof(LookupStub); }
private :
    friend struct LookupHolder;

    DWORD _entryPoint[4];       // 4 RISC-V instructions emitted by LookupHolder::Initialize
    PCODE _resolveWorkerTarget; // loaded PC-relative by the stub (byte offset 16 from entry)
    size_t _token;              // dispatch token, loaded PC-relative (byte offset 24 from entry)
};
29
struct LookupHolder
{
private:
    LookupStub _stub;
public:
    static void InitializeStatic() { }

    // Emits the lookup stub: load the dispatch token into t2 and tail-jump to
    // the resolve worker, both fetched PC-relative via t6 (set by auipc).
    void  Initialize(LookupHolder* pLookupHolderRX, PCODE resolveWorkerTarget, size_t dispatchToken)
    {
        // auipc t6, 0          // t6 = address of this stub
        // ld    t2, 24(t6)     // t2 = _token
        // ld    t6, 16(t6)     // t6 = _resolveWorkerTarget
        // jalr  x0, t6, 0      // tail-call the resolve worker
        //
        // _resolveWorkerTarget // data word at byte offset 16
        // _token               // data word at byte offset 24

        _stub._entryPoint[0] = LOOKUP_STUB_FIRST_DWORD; // auipc t6, 0  //0x00000f97
        _stub._entryPoint[1] = 0x018fb383; //ld   t2, 24(t6)
        _stub._entryPoint[2] = 0x010fbf83; //ld   t6, 16(t6)
        _stub._entryPoint[3] = 0x000f8067; //jalr x0, t6, 0

        _stub._resolveWorkerTarget = resolveWorkerTarget;
        _stub._token               = dispatchToken;
    }

    LookupStub*    stub()        { LIMITED_METHOD_CONTRACT; return &_stub; }
    static LookupHolder*  FromLookupEntry(PCODE lookupEntry)
    {
        // Recover the holder from the stub entry-point address stored in an
        // indirection cell.
        return (LookupHolder*) ( lookupEntry - offsetof(LookupHolder, _stub) - offsetof(LookupStub, _entryPoint)  );
    }
};
62
struct DispatchStub
{
    // Monomorphic dispatch stub: compares the object's MethodTable against
    // _expectedMT and jumps to _implTarget on a match, _failTarget otherwise.
    // Field layout is relied on by the PC-relative loads emitted in
    // DispatchHolder::Initialize — do not reorder.
    inline PCODE entryPoint()         { LIMITED_METHOD_CONTRACT; return (PCODE)&_entryPoint[0]; }

    inline size_t expectedMT()  { LIMITED_METHOD_CONTRACT; return _expectedMT; }
    inline PCODE implTarget()  { LIMITED_METHOD_CONTRACT; return _implTarget; }

    // Returns the address of the _implTarget slot for backpatching; the slot
    // lives in the executable mapping of the stub.
    inline TADDR implTargetSlot(EntryPointSlots::SlotType *slotTypeRef) const
    {
        LIMITED_METHOD_CONTRACT;
        _ASSERTE(slotTypeRef != nullptr);

        *slotTypeRef = EntryPointSlots::SlotType_Executable;
        return (TADDR)&_implTarget;
    }

    inline PCODE failTarget()  { LIMITED_METHOD_CONTRACT; return _failTarget; }
    inline size_t size()        { LIMITED_METHOD_CONTRACT; return sizeof(DispatchStub); }

private:
    friend struct DispatchHolder;

    DWORD _entryPoint[8];   // 8 RISC-V instructions (see DispatchHolder::Initialize)
    size_t  _expectedMT;    // MethodTable this stub matches (loaded at byte offset 32)
    PCODE _implTarget;      // taken on MethodTable match (byte offset 40)
    PCODE _failTarget;      // taken on mismatch (byte offset 48)
};
90
struct DispatchHolder
{
    static void InitializeStatic()
    {
        LIMITED_METHOD_CONTRACT;

        // Check that _implTarget is aligned in the DispatchHolder for backpatching
        static_assert_no_msg(((offsetof(DispatchHolder, _stub) + offsetof(DispatchStub, _implTarget)) % sizeof(void *)) == 0);
    }

    // Emits the dispatch stub code. t4 (set by auipc) is the stub base, from
    // which the three data slots are loaded PC-relative at 32/40/48 bytes.
    void  Initialize(DispatchHolder* pDispatchHolderRX, PCODE implTarget, PCODE failTarget, size_t expectedMT)
    {
        // auipc t4,0          // t4 = address of this stub
        // ld    t0, 0(a0)     // methodTable from object in $a0
        // ld    t6, 32(t4)    // t6 _expectedMT
        // bne   t6, t0, failLabel
        // ld    t4, 40(t4)    // t4 _implTarget
        // jalr  x0, t4, 0
        // failLabel:
        // ld    t4, 48(t4)    // t4 _failTarget
        // jalr  x0, t4, 0
        //
        //
        // _expectedMT  (data, byte offset 32)
        // _implTarget  (data, byte offset 40)
        // _failTarget  (data, byte offset 48)

        _stub._entryPoint[0] = DISPATCH_STUB_FIRST_DWORD; // auipc t4,0  // 0x00000e97
        _stub._entryPoint[1] = 0x00053283; // ld    t0, 0(a0)     // methodTable from object in $a0
        _stub._entryPoint[2] = 0x020ebf83; // ld    t6, 32(t4)    // t6 _expectedMT
        _stub._entryPoint[3] = 0x005f9663; // bne   t6, t0, failLabel
        _stub._entryPoint[4] = 0x028ebe83; // ld    t4, 40(t4)    // t4 _implTarget
        _stub._entryPoint[5] = 0x000e8067; // jalr  x0, t4, 0
        _stub._entryPoint[6] = 0x030ebe83; // ld    t4, 48(t4)    // t4 _failTarget
        _stub._entryPoint[7] = 0x000e8067; // jalr  x0, t4, 0

        _stub._expectedMT = expectedMT;
        _stub._implTarget = implTarget;
        _stub._failTarget = failTarget;
    }

    DispatchStub* stub()      { LIMITED_METHOD_CONTRACT; return &_stub; }

    static DispatchHolder*  FromDispatchEntry(PCODE dispatchEntry)
    {
        LIMITED_METHOD_CONTRACT;
        // Recover the holder from the stub entry-point address.
        DispatchHolder* dispatchHolder = (DispatchHolder*) ( dispatchEntry - offsetof(DispatchHolder, _stub) - offsetof(DispatchStub, _entryPoint) );
        return dispatchHolder;
    }

private:
    DispatchStub _stub;
};
144
struct ResolveStub
{
    // Polymorphic resolve stub with three entry points (fast cache lookup, slow
    // path, and fail/backpatch path) followed by a data region. The code loads
    // the data fields PC-relative with offsets hard-wired in
    // ResolveHolder::Initialize, so neither the lengths below nor the field
    // order may change without updating (and re-asserting) those offsets.
    inline PCODE failEntryPoint()            { LIMITED_METHOD_CONTRACT; return (PCODE)&_failEntryPoint[0]; }
    inline PCODE resolveEntryPoint()         { LIMITED_METHOD_CONTRACT; return (PCODE)&_resolveEntryPoint[0]; }
    inline PCODE slowEntryPoint()            { LIMITED_METHOD_CONTRACT; return (PCODE)&_slowEntryPoint[0]; }
    inline size_t  token()                   { LIMITED_METHOD_CONTRACT; return _token; }
    inline INT32*  pCounter()                { LIMITED_METHOD_CONTRACT; return _pCounter; }

    // _hashedToken is stored pre-shifted by LOG2_PTRSIZE (see ResolveHolder::
    // Initialize); shift back to recover the raw hashed token.
    inline UINT32  hashedToken()             { LIMITED_METHOD_CONTRACT; return _hashedToken >> LOG2_PTRSIZE;    }
    inline size_t  cacheAddress()            { LIMITED_METHOD_CONTRACT; return _cacheAddress;   }
    inline size_t  size()                    { LIMITED_METHOD_CONTRACT; return sizeof(ResolveStub); }

private:
    friend struct ResolveHolder;
    // Lengths of the three code sequences, in 4-byte instructions.
    const static int resolveEntryPointLen = 20;
    const static int slowEntryPointLen = 4;
    const static int failEntryPointLen = 9;

    DWORD _resolveEntryPoint[resolveEntryPointLen]; // fast path: hashed cache lookup
    DWORD _slowEntryPoint[slowEntryPointLen];       // tail-calls _resolveWorkerTarget
    DWORD _failEntryPoint[failEntryPointLen];       // decrements *_pCounter, then re-enters resolve
    UINT32  _hashedToken;            // hashed dispatch token << LOG2_PTRSIZE
    INT32*  _pCounter;               //Base of the Data Region
    size_t  _cacheAddress;           // lookupCache
    size_t  _token;
    PCODE   _resolveWorkerTarget;
};
172
173 struct ResolveHolder
174 {
175     static void  InitializeStatic() { }
176
177     void Initialize(ResolveHolder* pResolveHolderRX,
178                     PCODE resolveWorkerTarget, PCODE patcherTarget,
179                     size_t dispatchToken, UINT32 hashedToken,
180                     void * cacheAddr, INT32 * counterAddr)
181     {
182         int n=0;
183         INT32 pc_offset;
184
185 /******** Rough Convention of used in this routine
186         ;;ra  temp base address of loading data region
187         ;;t5  indirection cell
188         ;;t3  MethodTable (from object ref in a0), out: this._token
189         ;;t0  hash scratch
190         ;;t1  temp
191         ;;t2  temp
192         ;;t6 hash scratch
193         ;;cachemask => [CALL_STUB_CACHE_MASK * sizeof(void*)]
194
195         // Called directly by JITTED code
196         // ResolveStub._resolveEntryPoint(a0:Object*, a1 ...,a7, t8:IndirectionCellAndFlags)
197         // {
198         //    MethodTable mt = a0.m_pMethTab;
199         //    int i = ((mt + mt >> 12) ^ this._hashedToken) & _cacheMask
200         //    ResolveCacheElem e = this._cacheAddress + i
201         //    t1 = e = this._cacheAddress + i
202         //    if (mt == e.pMT && this._token == e.token)
203         //    {
204         //        (e.target)(a0, [a1,...,a7]);
205         //    }
206         //    else
207         //    {
208         //        t3 = this._token;
209         //        (this._slowEntryPoint)(a0, [a1,.., a7], t5, t3);
210         //    }
211         // }
212  ********/
213
214         ///;;resolveEntryPoint
215         // Called directly by JITTED code
216         // ResolveStub._resolveEntryPoint(a0:Object*, a1 ...,a7, t5:IndirectionCellAndFlags)
217
218         //      ld  t3, 0(a0)
219         _stub._resolveEntryPoint[n++] = RESOLVE_STUB_FIRST_DWORD;
220         //      srli  t0, t3, 0xc
221         _stub._resolveEntryPoint[n++] = 0x00ce5293;
222         //      add  t1, t3, t0
223         _stub._resolveEntryPoint[n++] = 0x005e0333;
224         //      auipc t0, 0
225         _stub._resolveEntryPoint[n++] = 0x00000297;
226         //  addi t0, t0, -12
227         _stub._resolveEntryPoint[n++] = 0xff428293;
228
229         //      lw  t6, 0(t0)  #t6 = this._hashedToken
230         _stub._resolveEntryPoint[n++] = 0x0002af83 | (33 << 22); //(20+4+9)*4<<20;
231         _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen) == 33);
232         _ASSERTE((33<<2) == (offsetof(ResolveStub, _hashedToken) -offsetof(ResolveStub, _resolveEntryPoint[0])));
233
234         //      xor      t1, t1, t6
235         _stub._resolveEntryPoint[n++] = 0x01f34333;
236         //      cachemask
237         _ASSERTE(CALL_STUB_CACHE_MASK * sizeof(void*) == 0x7ff8);
238         // lui  t6, 0x7ff8
239         _stub._resolveEntryPoint[n++] = 0x07ff8fb7;
240         // srliw  t6, t6, 12
241         _stub._resolveEntryPoint[n++] = 0x00cfdf9b;
242         //      and  t1, t1, t6
243         _stub._resolveEntryPoint[n++] = 0x01f37333;
244         //      ld  t6, 0(t0)    # t6 = this._cacheAddress
245         _stub._resolveEntryPoint[n++] = 0x0002bf83 | (36 << 22); //(20+4+9+1+2)*4<<20;
246         _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+2) == 36);
247         _ASSERTE((36<<2) == (offsetof(ResolveStub, _cacheAddress) -offsetof(ResolveStub, _resolveEntryPoint[0])));
248         //  add t1, t6, t1
249         _stub._resolveEntryPoint[n++] = 0x006f8333;
250         //      ld  t1, 0(t1)    # t1 = e = this._cacheAddress[i]
251         _stub._resolveEntryPoint[n++] = 0x00033303;
252
253         //      ld  t6, 0(t1)    # t6 = Check mt == e.pMT;
254         _stub._resolveEntryPoint[n++] = 0x00033f83 | ((offsetof(ResolveCacheElem, pMT) & 0xfff) << 20);
255         //      ld  t2, 0(t0)  #  $t2 = this._token
256         _stub._resolveEntryPoint[n++] = 0x0002b383 | (38<<22);//(20+4+9+1+2+2)*4<<20;
257         _ASSERTE((ResolveStub::resolveEntryPointLen+ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+4) == 38);
258         _ASSERTE((38<<2) == (offsetof(ResolveStub, _token) -offsetof(ResolveStub, _resolveEntryPoint[0])));
259
260         //      bne  t6, t3, next
261         _stub._resolveEntryPoint[n++] = 0x01cf9a63;// | PC_REL_OFFSET(_slowEntryPoint[0], n);
262
263         //      ld  t6, 0(t1)    # t6 = e.token;
264         _stub._resolveEntryPoint[n++] = 0x00033f83 | ((offsetof(ResolveCacheElem, token) & 0xfff)<<10);
265         //      bne      t6, t2, next
266         _stub._resolveEntryPoint[n++] = 0x007f9663;// | PC_REL_OFFSET(_slowEntryPoint[0], n);
267
268          pc_offset = offsetof(ResolveCacheElem, target) & 0xffffffff;
269          _ASSERTE(pc_offset >=0 && pc_offset%8 == 0);
270         //      ld  t3, 0(t1)    # t3 = e.target;
271         _stub._resolveEntryPoint[n++] = 0x00033e03 | ((offsetof(ResolveCacheElem, target) & 0xfff)<<10);
272         //      jalr x0, t3, 0
273         _stub._resolveEntryPoint[n++] = 0x000e0067;
274
275         _ASSERTE(n == ResolveStub::resolveEntryPointLen);
276         _ASSERTE(_stub._resolveEntryPoint + n == _stub._slowEntryPoint);
277
278         // ResolveStub._slowEntryPoint(a0:MethodToken, [a1..a7], t5:IndirectionCellAndFlags)
279         // {
280         //     t2 = this._token;
281         //     this._resolveWorkerTarget(a0, [a1..a7], t5, t2);
282         // }
283 //#undef PC_REL_OFFSET
284 //#define PC_REL_OFFSET(_member, _index) (((INT32)(offsetof(ResolveStub, _member) - (offsetof(ResolveStub, _slowEntryPoint[_index])))) & 0xffff)
285         // ;;slowEntryPoint:
286         // ;;fall through to the slow case
287
288         //      auipc t0, 0
289         _stub._slowEntryPoint[0] = 0x00000297;
290         //      ld  t6, 0(t0)    # r21 = _resolveWorkerTarget;
291         _ASSERTE((0x14*4) == ((INT32)(offsetof(ResolveStub, _resolveWorkerTarget) - (offsetof(ResolveStub, _slowEntryPoint[0])))));
292         _ASSERTE((ResolveStub::slowEntryPointLen + ResolveStub::failEntryPointLen+1+3*2) == 0x14);
293         _stub._slowEntryPoint[1] = 0x0002bf83 | ((0x14 * 4) << 20);
294
295         //      ld  t2, 0(t0)    # t2 = this._token;
296         _stub._slowEntryPoint[2] = 0x0002b383 | ((0x12 * 4) << 20); //(18*4=72=0x48)<<20
297         _ASSERTE((ResolveStub::slowEntryPointLen+ResolveStub::failEntryPointLen+1+4)*4 == (0x12 * 4));
298         _ASSERTE((0x12 * 4) == (offsetof(ResolveStub, _token) -offsetof(ResolveStub, _slowEntryPoint[0])));
299
300         //      jalr  x0, t6, 0
301         _stub._slowEntryPoint[3] = 0x000f8067;
302
303          _ASSERTE(4 == ResolveStub::slowEntryPointLen);
304
305         // ResolveStub._failEntryPoint(a0:MethodToken, a1,.., a7, t5:IndirectionCellAndFlags)
306         // {
307         //     if(--*(this._pCounter) < 0) t5 = t5 | SDF_ResolveBackPatch;
308         //     this._resolveEntryPoint(a0, [a1..a7]);
309         // }
310 //#undef PC_REL_OFFSET
311 //#define PC_REL_OFFSET(_member, _index) (((INT32)(offsetof(ResolveStub, _member) - (offsetof(ResolveStub, _failEntryPoint[_index])))) & 0xffff)
312         //;;failEntryPoint
313
314         //      auipc t0, 0
315         _stub._failEntryPoint[0] = 0x00000297;
316         //      ld  t1, 0(t0)    # t1 = _pCounter;  0x2800000=((failEntryPointLen+1)*4)<<20.
317         _stub._failEntryPoint[1] = 0x0002b303 | 0x2800000;
318         _ASSERTE((((ResolveStub::failEntryPointLen+1)*4)<<20) == 0x2800000);
319         _ASSERTE((0x2800000>>20) == ((INT32)(offsetof(ResolveStub, _pCounter) - (offsetof(ResolveStub, _failEntryPoint[0])))));
320         //      lw  t6, 0(t1)
321         _stub._failEntryPoint[2] = 0x00032f83;
322         //      addi  t6, t6, -1
323         _stub._failEntryPoint[3] = 0xffff8f93;
324
325         //      sw  t6, 0(t1)
326         _stub._failEntryPoint[4] = 0x01f32023;
327
328         _ASSERTE(SDF_ResolveBackPatch == 0x1);
329         // ;; ori t5, t5, t6 >=0 ? SDF_ResolveBackPatch:0;
330         //      slti t6, t6, 0
331         _stub._failEntryPoint[5] = 0x000faf93;
332         //      xori t6, t6, 1
333         _stub._failEntryPoint[6] = 0x001fcf93;
334         //      or  t5, t5, t6
335         _stub._failEntryPoint[7] = 0x01ff6f33;
336
337         //      j       _resolveEntryPoint   // pc - 128 = pc + 4 - resolveEntryPointLen * 4 - slowEntryPointLen * 4 - failEntryPointLen * 4;
338         _stub._failEntryPoint[8] = 0xf81ff06f;
339
340         _ASSERTE(9 == ResolveStub::failEntryPointLen);
341          _stub._pCounter = counterAddr;
342         _stub._hashedToken         = hashedToken << LOG2_PTRSIZE;
343         _stub._cacheAddress        = (size_t) cacheAddr;
344         _stub._token               = dispatchToken;
345         _stub._resolveWorkerTarget = resolveWorkerTarget;
346
347         _ASSERTE(resolveWorkerTarget == (PCODE)ResolveWorkerChainLookupAsmStub);
348         _ASSERTE(patcherTarget == NULL);
349
350 #undef DATA_OFFSET
351 #undef PC_REL_OFFSET
352 #undef Dataregionbase
353     }
354
355     ResolveStub* stub()      { LIMITED_METHOD_CONTRACT; return &_stub; }
356
357     static ResolveHolder*  FromFailEntry(PCODE failEntry);
358     static ResolveHolder*  FromResolveEntry(PCODE resolveEntry);
359 private:
360     ResolveStub _stub;
361 };
362
363
364 /*VTableCallStub**************************************************************************************
365 These are jump stubs that perform a vtable-base virtual call. These stubs assume that an object is placed
366 in the first argument register (this pointer). From there, the stub extracts the MethodTable pointer, followed by the
367 vtable pointer, and finally jumps to the target method at a given slot in the vtable.
368 */
struct VTableCallStub
{
    friend struct VTableCallHolder;

    inline size_t size()
    {
        // Not implemented for RISCV64 yet: the stub is variably sized (see
        // VTableCallHolder::GetHolderSize), so computing size here would
        // require decoding the emitted instructions.
        _ASSERTE(!"RISCV64:NYI");
        return 0;
    }

    inline PCODE        entryPoint()        const { LIMITED_METHOD_CONTRACT;  return (PCODE)&_entryPoint[0]; }

    inline size_t token()
    {
        LIMITED_METHOD_CONTRACT;
        // The slot number is stored as the last dword of the stub by
        // VTableCallHolder::Initialize. NOTE(review): this depends on size(),
        // which currently asserts NYI and returns 0 — token() is therefore not
        // usable until size() is implemented.
        DWORD slot = *(DWORD*)(reinterpret_cast<BYTE*>(this) + size() - 4);
        return DispatchToken::CreateDispatchToken(slot).To_SIZE_T();
    }

private:
    BYTE    _entryPoint[0];         // Dynamically sized stub. See Initialize() for more details.
};
391
392 /* VTableCallHolders are the containers for VTableCallStubs, they provide for any alignment of
393 stubs as necessary.  */
394 struct VTableCallHolder
395 {
396     void  Initialize(unsigned slot);
397
398     VTableCallStub* stub() { LIMITED_METHOD_CONTRACT;  return reinterpret_cast<VTableCallStub *>(this); }
399
400     static size_t GetHolderSize(unsigned slot)
401     {
402         STATIC_CONTRACT_WRAPPER;
403         unsigned offsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(slot) * TARGET_POINTER_SIZE;
404         unsigned offsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(slot) * TARGET_POINTER_SIZE;
405         int indirectionsCodeSize = (offsetOfIndirection >= 0x1000 ? 12 : 4) + (offsetAfterIndirection >= 0x1000 ? 12 : 4);
406         int indirectionsDataSize = (offsetOfIndirection >= 0x1000 ? 4 : 0) + (offsetAfterIndirection >= 0x1000 ? 4 : 0);
407         return 12 + indirectionsCodeSize + ((indirectionsDataSize > 0) ? (indirectionsDataSize + 4) : 0);
408     }
409
410     static VTableCallHolder* FromVTableCallEntry(PCODE entry) { LIMITED_METHOD_CONTRACT; return (VTableCallHolder*)entry; }
411
412 private:
413     // VTableCallStub follows here. It is dynamically sized on allocation because it could
414     // use short/long instruction sizes for LDR, depending on the slot value.
415 };
416
417
418 #ifdef DECLARE_DATA
419
420 #ifndef DACCESS_COMPILE
421 ResolveHolder* ResolveHolder::FromFailEntry(PCODE failEntry)
422 {
423     LIMITED_METHOD_CONTRACT;
424     ResolveHolder* resolveHolder = (ResolveHolder*) ( failEntry - offsetof(ResolveHolder, _stub) - offsetof(ResolveStub, _failEntryPoint) );
425     return resolveHolder;
426 }
427
428 ResolveHolder* ResolveHolder::FromResolveEntry(PCODE resolveEntry)
429 {
430     LIMITED_METHOD_CONTRACT;
431     ResolveHolder* resolveHolder = (ResolveHolder*) ( resolveEntry - offsetof(ResolveHolder, _stub) - offsetof(ResolveStub, _resolveEntryPoint) );
432     return resolveHolder;
433 }
434
435 void VTableCallHolder::Initialize(unsigned slot)
436 {
437     unsigned offsetOfIndirection = MethodTable::GetVtableOffset() + MethodTable::GetIndexOfVtableIndirection(slot) * TARGET_POINTER_SIZE;
438     unsigned offsetAfterIndirection = MethodTable::GetIndexAfterVtableIndirection(slot) * TARGET_POINTER_SIZE;
439
440     VTableCallStub* pStub = stub();
441     BYTE* p = (BYTE*)pStub->entryPoint();
442
443     // ld  t4, 0(a0) : t4 = MethodTable pointer
444     *(UINT32*)p = 0x00053e83; // VTABLECALL_STUB_FIRST_DWORD
445     p += 4;
446
447     if ((offsetOfIndirection >= 0x1000) || (offsetAfterIndirection >= 0x1000))
448     {
449         *(UINT32*)p = 0x00000317; // auipc t1, 0
450         p += 4;
451     }
452
453     if (offsetOfIndirection >= 0x1000)
454     {
455         uint dataOffset = 20 + (offsetAfterIndirection >= 0x1000 ? 12 : 4);
456
457         // lwu t3,dataOffset(t1)
458         *(DWORD*)p = 0x00036e03 | ((UINT32)dataOffset << 20); p += 4;
459         // add t4, t4, t3
460         *(DWORD*)p = 0x01ce8eb3; p += 4;
461         // ld t4, 0(t4)
462         *(DWORD*)p = 0x000ebe83; p += 4;
463     }
464     else
465     {
466         // ld t4, offsetOfIndirection(t4)
467         *(DWORD*)p = 0x000ebe83 | ((UINT32)offsetOfIndirection << 20); p += 4;
468     }
469
470     if (offsetAfterIndirection >= 0x1000)
471     {
472         uint indirectionsCodeSize = (offsetOfIndirection >= 0x1000 ? 12 : 4);
473         uint indirectionsDataSize = (offsetOfIndirection >= 0x1000 ? 4 : 0);
474         uint dataOffset = 20 + indirectionsCodeSize + indirectionsDataSize;
475
476         // ldw t3,dataOffset(t1)
477         *(DWORD*)p = 0x00036e03 | ((UINT32)dataOffset << 20); p += 4;
478         // add t4, t4, t3
479         *(DWORD*)p = 0x01ce8eb3; p += 4;
480         // ld t4, 0(t4)
481         *(DWORD*)p = 0x000ebe83; p += 4;
482     }
483     else
484     {
485         // ld t4, offsetAfterIndirection(t4)
486         *(DWORD*)p = 0x000ebe83 | ((UINT32)offsetAfterIndirection << 20); p += 4;
487     }
488
489     // jalr x0, t4, 0
490     *(UINT32*)p = 0x000e8067; p += 4;
491
492     // data labels:
493     if (offsetOfIndirection >= 0x1000)
494     {
495         *(UINT32*)p = (UINT32)offsetOfIndirection;
496         p += 4;
497     }
498     if (offsetAfterIndirection >= 0x1000)
499     {
500         *(UINT32*)p = (UINT32)offsetAfterIndirection;
501         p += 4;
502     }
503
504     // Store the slot value here for convenience. Not a real instruction (unreachable anyways)
505     // NOTE: Not counted in codeSize above.
506     *(UINT32*)p = slot; p += 4;
507
508     _ASSERT(p == (BYTE*)stub()->entryPoint() + VTableCallHolder::GetHolderSize(slot));
509     _ASSERT(stub()->size() == VTableCallHolder::GetHolderSize(slot));
510 }
511
512 #endif // DACCESS_COMPILE
513
514 #endif //DECLARE_DATA
515
516 #endif // _VIRTUAL_CALL_STUB_RISCV64_H