1 // Licensed to the .NET Foundation under one or more agreements.
2 // The .NET Foundation licenses this file to you under the MIT license.
6 // This file contains stub functions for unimplemented features needed to
7 // run on the RISCV64 platform.
10 #include "dllimportcallback.h"
11 #include "comdelegate.h"
12 #include "asmconstants.h"
13 #include "virtualcallstub.h"
14 #include "jitinterface.h"
18 #ifndef DACCESS_COMPILE
19 //-----------------------------------------------------------------------
20 // InstructionFormat for JAL/JALR (unconditional jump)
21 //-----------------------------------------------------------------------
// Emits an unconditional 64-bit jump or call on RISCV64: the target lives in
// a PC-relative 8-byte data slot that the code sequence loads with auipc + ld,
// then enters via jalr (linking through ra for calls, x0 for plain jumps).
// The "indirect" variation adds one more load, so the slot holds the address
// of a cell that in turn holds the final target.
22 class BranchInstructionFormat : public InstructionFormat
24 // Encoding of the VariationCode:
25 // bit(0) indicates whether this is a direct or an indirect jump.
26 // bit(1) indicates whether this is a branch with link -a.k.a call-
31 BIF_VAR_INDIRECT = 0x00000001,
32 BIF_VAR_CALL = 0x00000002,
34 BIF_VAR_JUMP = 0x00000000,
35 BIF_VAR_INDIRECT_CALL = 0x00000003
// True when the target must be reached through an extra indirection cell.
38 BOOL IsIndirect(UINT variationCode)
40 return (variationCode & BIF_VAR_INDIRECT) != 0;
// True when the branch must link (save the return address in ra).
42 BOOL IsCall(UINT variationCode)
44 return (variationCode & BIF_VAR_CALL) != 0;
49 BranchInstructionFormat() : InstructionFormat(InstructionFormat::k64)
51 LIMITED_METHOD_CONTRACT;
// Code size of the emitted sequence; only 64-bit references are supported,
// and the indirect form is larger (it needs the extra load).
54 virtual UINT GetSizeOfInstruction(UINT refSize, UINT variationCode)
56 LIMITED_METHOD_CONTRACT;
57 _ASSERTE(refSize == InstructionFormat::k64);
59 if (IsIndirect(variationCode))
// Size of the out-of-line data slot that accompanies the instructions.
65 virtual UINT GetSizeOfData(UINT refSize, UINT variationCode)
72 virtual UINT GetHotSpotOffset(UINT refsize, UINT variationCode)
78 virtual BOOL CanReach(UINT refSize, UINT variationCode, BOOL fExternal, INT_PTR offset)
82 // Note that the parameter 'offset' is not an offset but the target address itself (when fExternal is true)
83 return (refSize == InstructionFormat::k64);
// Reachable when the displacement fits in signed 32 bits, or unconditionally
// when a full 64-bit data slot is used.
87 return ((offset >= -0x80000000L && offset <= 0x7fffffff) || (refSize == InstructionFormat::k64));
// Writes the code bytes through the writable mapping (pOutBufferRW) while
// computing PC-relative displacements against the executable mapping
// (pOutBufferRX) that will actually run; the 64-bit target value is stored
// in pDataBuffer.
91 virtual VOID EmitInstruction(UINT refSize, __int64 fixedUpReference, BYTE *pOutBufferRX, BYTE *pOutBufferRW, UINT variationCode, BYTE *pDataBuffer)
93 LIMITED_METHOD_CONTRACT;
95 if (IsIndirect(variationCode))
// The ld below is a 64-bit load, so the data slot must be 8-byte aligned.
97 _ASSERTE(((UINT_PTR)pDataBuffer & 7) == 0);
99 __int64 dataOffset = pDataBuffer - pOutBufferRW;
// auipc + ld can only address the slot within +/-2GB of the code.
101 if ((dataOffset < -(0x80000000L)) || (dataOffset > 0x7fffffff))
102 COMPlusThrow(kNotSupportedException);
104 UINT16 imm12 = (UINT16)(0xFFF & dataOffset);
105 // auipc t1, dataOffset[31:12]
106 // ld t1, t1, dataOffset[11:0]
// The +0x800 pre-biases the upper immediate so that the sign-extended low
// 12-bit immediate of the ld reconstructs the exact offset.
110 *(DWORD*)pOutBufferRW = 0x00000317 | (((dataOffset + 0x800) >> 12) << 12); // auipc t1, dataOffset[31:12]
111 *(DWORD*)(pOutBufferRW + 4) = 0x00033303 | (imm12 << 20); // ld t1, t1, dataOffset[11:0]
112 *(DWORD*)(pOutBufferRW + 8) = 0x00033303; // ld t1, 0(t1)
113 if (IsCall(variationCode))
115 *(DWORD*)(pOutBufferRW + 12) = 0x000300e7; // jalr ra, t1, 0
119 *(DWORD*)(pOutBufferRW + 12) = 0x00030067 ;// jalr x0, t1,0
// Indirect: the slot receives the RX-resolved cell address; the second ld
// above dereferences it to reach the final target.
122 *((__int64*)pDataBuffer) = fixedUpReference + (__int64)pOutBufferRX;
126 _ASSERTE(((UINT_PTR)pDataBuffer & 7) == 0);
128 __int64 dataOffset = pDataBuffer - pOutBufferRW;
130 if ((dataOffset < -(0x80000000L)) || (dataOffset > 0x7fffffff))
131 COMPlusThrow(kNotSupportedException);
133 UINT16 imm12 = (UINT16)(0xFFF & dataOffset);
134 // auipc t1, dataOffset[31:12]
135 // ld t1, t1, dataOffset[11:0]
138 *(DWORD*)pOutBufferRW = 0x00000317 | (((dataOffset + 0x800) >> 12) << 12);// auipc t1, dataOffset[31:12]
139 *(DWORD*)(pOutBufferRW + 4) = 0x00033303 | (imm12 << 20); // ld t1, t1, dataOffset[11:0]
140 if (IsCall(variationCode))
142 *(DWORD*)(pOutBufferRW + 8) = 0x000300e7; // jalr ra, t1, 0
146 *(DWORD*)(pOutBufferRW + 8) = 0x00030067 ;// jalr x0, t1,0
// Direct: the slot holds the absolute target; use checked addition so an
// overflowing fixup raises an arithmetic exception instead of wrapping.
149 if (!ClrSafeInt<__int64>::addition(fixedUpReference, (__int64)pOutBufferRX, fixedUpReference))
150 COMPlusThrowArithmetic();
151 *((__int64*)pDataBuffer) = fixedUpReference;
// Raw storage for the lazily-constructed singleton instance of this format.
156 static BYTE gBranchIF[sizeof(BranchInstructionFormat)];
// Nulls out the REGDISPLAY context pointers for every volatile register
// tracked here (R0, the argument registers a0-a7, and the temporaries t0-t6).
// These registers are not preserved across the frames this file handles, so
// the stack walker must not treat any stale values as live.
160 void ClearRegDisplayArgumentAndScratchRegisters(REGDISPLAY * pRD)
162 pRD->volatileCurrContextPointers.R0 = NULL;
163 pRD->volatileCurrContextPointers.A0 = NULL;
164 pRD->volatileCurrContextPointers.A1 = NULL;
165 pRD->volatileCurrContextPointers.A2 = NULL;
166 pRD->volatileCurrContextPointers.A3 = NULL;
167 pRD->volatileCurrContextPointers.A4 = NULL;
168 pRD->volatileCurrContextPointers.A5 = NULL;
169 pRD->volatileCurrContextPointers.A6 = NULL;
170 pRD->volatileCurrContextPointers.A7 = NULL;
171 pRD->volatileCurrContextPointers.T0 = NULL;
172 pRD->volatileCurrContextPointers.T1 = NULL;
173 pRD->volatileCurrContextPointers.T2 = NULL;
174 pRD->volatileCurrContextPointers.T3 = NULL;
175 pRD->volatileCurrContextPointers.T4 = NULL;
176 pRD->volatileCurrContextPointers.T5 = NULL;
177 pRD->volatileCurrContextPointers.T6 = NULL;
// Unwinds the state captured by LazyMachState (baseState) and fills
// 'unwoundstate' with the resulting register values, so a HelperMethodFrame's
// MachState can be materialized lazily. May abort early (leaving
// unwoundstate->_isValid false) if the execution-manager reader lock cannot
// be taken without yielding.
180 void LazyMachState::unwindLazyState(LazyMachState* baseState,
181 MachState* unwoundstate,
184 HostCallPreference hostCallPreference)
187 T_KNONVOLATILE_CONTEXT_POINTERS nonVolContextPtrs;
189 context.ContextFlags = 0; // Read by PAL_VirtualUnwind.
// Seed the context from the captured callee-saved registers; the capture
// array indices 0-13 map to fp, s1-s11, gp, tp (as the assignments show).
191 context.Fp = unwoundstate->captureCalleeSavedRegisters[0] = baseState->captureCalleeSavedRegisters[0];
192 context.S1 = unwoundstate->captureCalleeSavedRegisters[1] = baseState->captureCalleeSavedRegisters[1];
193 context.S2 = unwoundstate->captureCalleeSavedRegisters[2] = baseState->captureCalleeSavedRegisters[2];
194 context.S3 = unwoundstate->captureCalleeSavedRegisters[3] = baseState->captureCalleeSavedRegisters[3];
195 context.S4 = unwoundstate->captureCalleeSavedRegisters[4] = baseState->captureCalleeSavedRegisters[4];
196 context.S5 = unwoundstate->captureCalleeSavedRegisters[5] = baseState->captureCalleeSavedRegisters[5];
197 context.S6 = unwoundstate->captureCalleeSavedRegisters[6] = baseState->captureCalleeSavedRegisters[6];
198 context.S7 = unwoundstate->captureCalleeSavedRegisters[7] = baseState->captureCalleeSavedRegisters[7];
199 context.S8 = unwoundstate->captureCalleeSavedRegisters[8] = baseState->captureCalleeSavedRegisters[8];
200 context.S9 = unwoundstate->captureCalleeSavedRegisters[9] = baseState->captureCalleeSavedRegisters[9];
201 context.S10 = unwoundstate->captureCalleeSavedRegisters[10] = baseState->captureCalleeSavedRegisters[10];
202 context.S11 = unwoundstate->captureCalleeSavedRegisters[11] = baseState->captureCalleeSavedRegisters[11];
203 context.Gp = unwoundstate->captureCalleeSavedRegisters[12] = baseState->captureCalleeSavedRegisters[12];
204 context.Tp = unwoundstate->captureCalleeSavedRegisters[13] = baseState->captureCalleeSavedRegisters[13];
205 context.Ra = NULL; // Filled by the unwinder
207 context.Sp = baseState->captureSp;
208 context.Pc = baseState->captureIp;
210 #if !defined(DACCESS_COMPILE)
211 // For DAC, if we get here, it means that the LazyMachState is uninitialized and we have to unwind it.
212 // The API we use to unwind in DAC is StackWalk64(), which does not support the context pointers.
214 // Restore the integer registers to KNONVOLATILE_CONTEXT_POINTERS to be used for unwinding.
215 nonVolContextPtrs.Fp = &unwoundstate->captureCalleeSavedRegisters[0];
216 nonVolContextPtrs.S1 = &unwoundstate->captureCalleeSavedRegisters[1];
217 nonVolContextPtrs.S2 = &unwoundstate->captureCalleeSavedRegisters[2];
218 nonVolContextPtrs.S3 = &unwoundstate->captureCalleeSavedRegisters[3];
219 nonVolContextPtrs.S4 = &unwoundstate->captureCalleeSavedRegisters[4];
220 nonVolContextPtrs.S5 = &unwoundstate->captureCalleeSavedRegisters[5];
221 nonVolContextPtrs.S6 = &unwoundstate->captureCalleeSavedRegisters[6];
222 nonVolContextPtrs.S7 = &unwoundstate->captureCalleeSavedRegisters[7];
223 nonVolContextPtrs.S8 = &unwoundstate->captureCalleeSavedRegisters[8];
224 nonVolContextPtrs.S9 = &unwoundstate->captureCalleeSavedRegisters[9];
225 nonVolContextPtrs.S10 = &unwoundstate->captureCalleeSavedRegisters[10];
226 nonVolContextPtrs.S11 = &unwoundstate->captureCalleeSavedRegisters[11];
227 nonVolContextPtrs.Gp = &unwoundstate->captureCalleeSavedRegisters[12];
228 nonVolContextPtrs.Tp = &unwoundstate->captureCalleeSavedRegisters[13];
229 nonVolContextPtrs.Ra = NULL; // Filled by the unwinder
231 #endif // DACCESS_COMPILE
233 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK LazyMachState::unwindLazyState(ip:%p,sp:%p)\n", baseState->captureIp, baseState->captureSp));
// Perform the single-frame unwind using the unwinder appropriate for this
// build flavor (Thread::VirtualUnwindCallFrame / DAC / PAL).
240 pvControlPc = Thread::VirtualUnwindCallFrame(&context, &nonVolContextPtrs);
241 #else // !TARGET_UNIX
242 #ifdef DACCESS_COMPILE
243 HRESULT hr = DacVirtualUnwind(threadId, &context, &nonVolContextPtrs);
248 #else // DACCESS_COMPILE
249 BOOL success = PAL_VirtualUnwind(&context, &nonVolContextPtrs);
// An unwind failure here is unrecoverable: tear the process down.
252 _ASSERTE(!"unwindLazyState: Unwinding failed");
253 EEPOLICY_HANDLE_FATAL_ERROR(COR_E_EXECUTIONENGINE);
255 #endif // DACCESS_COMPILE
256 pvControlPc = GetIP(&context);
257 #endif // !TARGET_UNIX
// Stop either after a fixed number of frames (funCallDepth > 0) or once the
// unwound PC lands in managed code.
259 if (funCallDepth > 0)
262 if (funCallDepth == 0)
267 // Determine whether given IP resides in JITted code. (It returns nonzero in that case.)
268 // Use it now to see if we've unwound to managed code yet.
269 BOOL fFailedReaderLock = FALSE;
270 BOOL fIsManagedCode = ExecutionManager::IsManagedCode(pvControlPc, hostCallPreference, &fFailedReaderLock);
271 if (fFailedReaderLock)
273 // We don't know if we would have been able to find a JIT
274 // manager, because we couldn't enter the reader lock without
275 // yielding (and our caller doesn't want us to yield). So abort
278 // Invalidate the lazyState we're returning, so the caller knows
279 // we aborted before we could fully unwind
280 unwoundstate->_isValid = false;
// Publish the unwound callee-saved values into the capture area so the
// MachState now describes the caller's frame.
291 unwoundstate->captureCalleeSavedRegisters[0] = context.Fp;
292 unwoundstate->captureCalleeSavedRegisters[1] = context.S1;
293 unwoundstate->captureCalleeSavedRegisters[2] = context.S2;
294 unwoundstate->captureCalleeSavedRegisters[3] = context.S3;
295 unwoundstate->captureCalleeSavedRegisters[4] = context.S4;
296 unwoundstate->captureCalleeSavedRegisters[5] = context.S5;
297 unwoundstate->captureCalleeSavedRegisters[6] = context.S6;
298 unwoundstate->captureCalleeSavedRegisters[7] = context.S7;
299 unwoundstate->captureCalleeSavedRegisters[8] = context.S8;
300 unwoundstate->captureCalleeSavedRegisters[9] = context.S9;
301 unwoundstate->captureCalleeSavedRegisters[10] = context.S10;
302 unwoundstate->captureCalleeSavedRegisters[11] = context.S11;
303 unwoundstate->captureCalleeSavedRegisters[12] = context.Gp;
304 unwoundstate->captureCalleeSavedRegisters[13] = context.Tp;
307 #ifdef DACCESS_COMPILE
308 // For DAC builds, we update the registers directly since we dont have context pointers
309 unwoundstate->captureCalleeSavedRegisters[0] = context.Fp;
310 unwoundstate->captureCalleeSavedRegisters[1] = context.S1;
311 unwoundstate->captureCalleeSavedRegisters[2] = context.S2;
312 unwoundstate->captureCalleeSavedRegisters[3] = context.S3;
313 unwoundstate->captureCalleeSavedRegisters[4] = context.S4;
314 unwoundstate->captureCalleeSavedRegisters[5] = context.S5;
315 unwoundstate->captureCalleeSavedRegisters[6] = context.S6;
316 unwoundstate->captureCalleeSavedRegisters[7] = context.S7;
317 unwoundstate->captureCalleeSavedRegisters[8] = context.S8;
318 unwoundstate->captureCalleeSavedRegisters[9] = context.S9;
319 unwoundstate->captureCalleeSavedRegisters[10] = context.S10;
320 unwoundstate->captureCalleeSavedRegisters[11] = context.S11;
321 unwoundstate->captureCalleeSavedRegisters[12] = context.Gp;
322 unwoundstate->captureCalleeSavedRegisters[13] = context.Tp;
323 #else // !DACCESS_COMPILE
324 // For non-DAC builds, update the register state from context pointers
325 unwoundstate->ptrCalleeSavedRegisters[0] = nonVolContextPtrs.Fp;
326 unwoundstate->ptrCalleeSavedRegisters[1] = nonVolContextPtrs.S1;
327 unwoundstate->ptrCalleeSavedRegisters[2] = nonVolContextPtrs.S2;
328 unwoundstate->ptrCalleeSavedRegisters[3] = nonVolContextPtrs.S3;
329 unwoundstate->ptrCalleeSavedRegisters[4] = nonVolContextPtrs.S4;
330 unwoundstate->ptrCalleeSavedRegisters[5] = nonVolContextPtrs.S5;
331 unwoundstate->ptrCalleeSavedRegisters[6] = nonVolContextPtrs.S6;
332 unwoundstate->ptrCalleeSavedRegisters[7] = nonVolContextPtrs.S7;
333 unwoundstate->ptrCalleeSavedRegisters[8] = nonVolContextPtrs.S8;
334 unwoundstate->ptrCalleeSavedRegisters[9] = nonVolContextPtrs.S9;
335 unwoundstate->ptrCalleeSavedRegisters[10] = nonVolContextPtrs.S10;
336 unwoundstate->ptrCalleeSavedRegisters[11] = nonVolContextPtrs.S11;
337 unwoundstate->ptrCalleeSavedRegisters[12] = nonVolContextPtrs.Gp;
338 unwoundstate->ptrCalleeSavedRegisters[13] = nonVolContextPtrs.Tp;
339 #endif // DACCESS_COMPILE
341 unwoundstate->_pc = context.Pc;
342 unwoundstate->_sp = context.Sp;
// Mark the state usable only after every field above has been written.
344 unwoundstate->_isValid = TRUE;
// Populates the REGDISPLAY from the MachState cached in this
// HelperMethodFrame so the stack walker can continue past the helper call.
// In DAC builds the MachState may still be uninitialized, in which case it
// is unwound on the spot via InsureInit.
347 void HelperMethodFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
358 pRD->IsCallerContextValid = FALSE;
359 pRD->IsCallerSPValid = FALSE; // Don't add usage of this field. This is only temporary.
362 // Copy the saved state from the frame to the current context.
365 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK HelperMethodFrame::UpdateRegDisplay cached ip:%p, sp:%p\n", m_MachState._pc, m_MachState._sp));
367 #if defined(DACCESS_COMPILE)
368 // For DAC, we may get here when the HMF is still uninitialized.
369 // So we may need to unwind here.
370 if (!m_MachState.isValid())
372 // This allocation throws on OOM.
373 MachState* pUnwoundState = (MachState*)DacAllocHostOnlyInstance(sizeof(*pUnwoundState), true);
375 InsureInit(false, pUnwoundState);
377 pRD->pCurrentContext->Pc = pRD->ControlPC = pUnwoundState->_pc;
378 pRD->pCurrentContext->Sp = pRD->SP = pUnwoundState->_sp;
// Callee-saved registers come straight from the freshly unwound capture
// area (indices 0-13 = fp, s1-s11, gp, tp, as assigned below).
379 pRD->pCurrentContext->Fp = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[0]);
380 pRD->pCurrentContext->S1 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[1]);
381 pRD->pCurrentContext->S2 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[2]);
382 pRD->pCurrentContext->S3 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[3]);
383 pRD->pCurrentContext->S4 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[4]);
384 pRD->pCurrentContext->S5 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[5]);
385 pRD->pCurrentContext->S6 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[6]);
386 pRD->pCurrentContext->S7 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[7]);
387 pRD->pCurrentContext->S8 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[8]);
388 pRD->pCurrentContext->S9 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[9]);
389 pRD->pCurrentContext->S10 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[10]);
390 pRD->pCurrentContext->S11 = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[11]);
391 pRD->pCurrentContext->Gp = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[12]);
392 pRD->pCurrentContext->Tp = (DWORD64)(pUnwoundState->captureCalleeSavedRegisters[13]);
393 pRD->pCurrentContext->Ra = NULL; // Unwind again to get Caller's PC
395 pRD->pCurrentContextPointers->Fp = pUnwoundState->ptrCalleeSavedRegisters[0];
396 pRD->pCurrentContextPointers->S1 = pUnwoundState->ptrCalleeSavedRegisters[1];
397 pRD->pCurrentContextPointers->S2 = pUnwoundState->ptrCalleeSavedRegisters[2];
398 pRD->pCurrentContextPointers->S3 = pUnwoundState->ptrCalleeSavedRegisters[3];
399 pRD->pCurrentContextPointers->S4 = pUnwoundState->ptrCalleeSavedRegisters[4];
400 pRD->pCurrentContextPointers->S5 = pUnwoundState->ptrCalleeSavedRegisters[5];
401 pRD->pCurrentContextPointers->S6 = pUnwoundState->ptrCalleeSavedRegisters[6];
402 pRD->pCurrentContextPointers->S7 = pUnwoundState->ptrCalleeSavedRegisters[7];
403 pRD->pCurrentContextPointers->S8 = pUnwoundState->ptrCalleeSavedRegisters[8];
404 pRD->pCurrentContextPointers->S9 = pUnwoundState->ptrCalleeSavedRegisters[9];
405 pRD->pCurrentContextPointers->S10 = pUnwoundState->ptrCalleeSavedRegisters[10];
406 pRD->pCurrentContextPointers->S11 = pUnwoundState->ptrCalleeSavedRegisters[11];
407 pRD->pCurrentContextPointers->Gp = pUnwoundState->ptrCalleeSavedRegisters[12];
408 pRD->pCurrentContextPointers->Tp = pUnwoundState->ptrCalleeSavedRegisters[13];
409 pRD->pCurrentContextPointers->Ra = NULL;
412 #endif // DACCESS_COMPILE
414 // reset pContext; it's only valid for active (top-most) frame
415 pRD->pContext = NULL;
416 pRD->ControlPC = GetReturnAddress(); // m_MachState._pc;
417 pRD->SP = (DWORD64)(size_t)m_MachState._sp;
419 pRD->pCurrentContext->Pc = pRD->ControlPC;
420 pRD->pCurrentContext->Sp = pRD->SP;
// Prefer the value behind the context pointer when one exists (it tracks
// where the register was actually saved); fall back to the captured copy.
423 pRD->pCurrentContext->Fp = m_MachState.ptrCalleeSavedRegisters[0] ? *m_MachState.ptrCalleeSavedRegisters[0] : m_MachState.captureCalleeSavedRegisters[0];
424 pRD->pCurrentContext->S1 = m_MachState.ptrCalleeSavedRegisters[1] ? *m_MachState.ptrCalleeSavedRegisters[1] : m_MachState.captureCalleeSavedRegisters[1];
425 pRD->pCurrentContext->S2 = m_MachState.ptrCalleeSavedRegisters[2] ? *m_MachState.ptrCalleeSavedRegisters[2] : m_MachState.captureCalleeSavedRegisters[2];
426 pRD->pCurrentContext->S3 = m_MachState.ptrCalleeSavedRegisters[3] ? *m_MachState.ptrCalleeSavedRegisters[3] : m_MachState.captureCalleeSavedRegisters[3];
427 pRD->pCurrentContext->S4 = m_MachState.ptrCalleeSavedRegisters[4] ? *m_MachState.ptrCalleeSavedRegisters[4] : m_MachState.captureCalleeSavedRegisters[4];
428 pRD->pCurrentContext->S5 = m_MachState.ptrCalleeSavedRegisters[5] ? *m_MachState.ptrCalleeSavedRegisters[5] : m_MachState.captureCalleeSavedRegisters[5];
429 pRD->pCurrentContext->S6 = m_MachState.ptrCalleeSavedRegisters[6] ? *m_MachState.ptrCalleeSavedRegisters[6] : m_MachState.captureCalleeSavedRegisters[6];
430 pRD->pCurrentContext->S7 = m_MachState.ptrCalleeSavedRegisters[7] ? *m_MachState.ptrCalleeSavedRegisters[7] : m_MachState.captureCalleeSavedRegisters[7];
431 pRD->pCurrentContext->S8 = m_MachState.ptrCalleeSavedRegisters[8] ? *m_MachState.ptrCalleeSavedRegisters[8] : m_MachState.captureCalleeSavedRegisters[8];
432 pRD->pCurrentContext->S9 = m_MachState.ptrCalleeSavedRegisters[9] ? *m_MachState.ptrCalleeSavedRegisters[9] : m_MachState.captureCalleeSavedRegisters[9];
433 pRD->pCurrentContext->S10 = m_MachState.ptrCalleeSavedRegisters[10] ? *m_MachState.ptrCalleeSavedRegisters[10] : m_MachState.captureCalleeSavedRegisters[10];
434 pRD->pCurrentContext->S11 = m_MachState.ptrCalleeSavedRegisters[11] ? *m_MachState.ptrCalleeSavedRegisters[11] : m_MachState.captureCalleeSavedRegisters[11];
435 pRD->pCurrentContext->Gp = m_MachState.ptrCalleeSavedRegisters[12] ? *m_MachState.ptrCalleeSavedRegisters[12] : m_MachState.captureCalleeSavedRegisters[12];
436 pRD->pCurrentContext->Tp = m_MachState.ptrCalleeSavedRegisters[13] ? *m_MachState.ptrCalleeSavedRegisters[13] : m_MachState.captureCalleeSavedRegisters[13];
437 pRD->pCurrentContext->Ra = NULL; // Unwind again to get Caller's PC
439 pRD->pCurrentContext->Fp = *m_MachState.ptrCalleeSavedRegisters[0];
440 pRD->pCurrentContext->S1 = *m_MachState.ptrCalleeSavedRegisters[1];
441 pRD->pCurrentContext->S2 = *m_MachState.ptrCalleeSavedRegisters[2];
442 pRD->pCurrentContext->S3 = *m_MachState.ptrCalleeSavedRegisters[3];
443 pRD->pCurrentContext->S4 = *m_MachState.ptrCalleeSavedRegisters[4];
444 pRD->pCurrentContext->S5 = *m_MachState.ptrCalleeSavedRegisters[5];
445 pRD->pCurrentContext->S6 = *m_MachState.ptrCalleeSavedRegisters[6];
446 pRD->pCurrentContext->S7 = *m_MachState.ptrCalleeSavedRegisters[7];
447 pRD->pCurrentContext->S8 = *m_MachState.ptrCalleeSavedRegisters[8];
448 pRD->pCurrentContext->S9 = *m_MachState.ptrCalleeSavedRegisters[9];
449 pRD->pCurrentContext->S10 = *m_MachState.ptrCalleeSavedRegisters[10];
450 pRD->pCurrentContext->S11 = *m_MachState.ptrCalleeSavedRegisters[11];
451 pRD->pCurrentContext->Gp = *m_MachState.ptrCalleeSavedRegisters[12];
452 pRD->pCurrentContext->Tp = *m_MachState.ptrCalleeSavedRegisters[13];
453 pRD->pCurrentContext->Ra = NULL; // Unwind again to get Caller's PC
456 #if !defined(DACCESS_COMPILE)
457 pRD->pCurrentContextPointers->Fp = m_MachState.ptrCalleeSavedRegisters[0];
458 pRD->pCurrentContextPointers->S1 = m_MachState.ptrCalleeSavedRegisters[1];
459 pRD->pCurrentContextPointers->S2 = m_MachState.ptrCalleeSavedRegisters[2];
460 pRD->pCurrentContextPointers->S3 = m_MachState.ptrCalleeSavedRegisters[3];
461 pRD->pCurrentContextPointers->S4 = m_MachState.ptrCalleeSavedRegisters[4];
462 pRD->pCurrentContextPointers->S5 = m_MachState.ptrCalleeSavedRegisters[5];
463 pRD->pCurrentContextPointers->S6 = m_MachState.ptrCalleeSavedRegisters[6];
464 pRD->pCurrentContextPointers->S7 = m_MachState.ptrCalleeSavedRegisters[7];
465 pRD->pCurrentContextPointers->S8 = m_MachState.ptrCalleeSavedRegisters[8];
466 pRD->pCurrentContextPointers->S9 = m_MachState.ptrCalleeSavedRegisters[9];
467 pRD->pCurrentContextPointers->S10 = m_MachState.ptrCalleeSavedRegisters[10];
468 pRD->pCurrentContextPointers->S11 = m_MachState.ptrCalleeSavedRegisters[11];
469 pRD->pCurrentContextPointers->Gp = m_MachState.ptrCalleeSavedRegisters[12];
470 pRD->pCurrentContextPointers->Tp = m_MachState.ptrCalleeSavedRegisters[13];
471 pRD->pCurrentContextPointers->Ra = NULL; // Unwind again to get Caller's PC
473 ClearRegDisplayArgumentAndScratchRegisters(pRD);
476 #ifndef DACCESS_COMPILE
// Initializes the ThisPtrRetBufPrecode: a six-instruction stub that swaps
// a0 (this) with a1 (return-buffer argument) and then tail-jumps to
// m_pTarget, which is stored immediately after the code and initially points
// at the prestub.
477 void ThisPtrRetBufPrecode::Init(MethodDesc* pMD, LoaderAllocator *pLoaderAllocator)
// Swap a0 and a1 using t6 as scratch.
484 m_rgCode[0] = 0x00050f93; // addi t6, a0, 0x0
485 m_rgCode[1] = 0x00058513; // addi a0, a1, 0x0
486 m_rgCode[2] = 0x000f8593; // addi a1, t6, 0x0
// Load m_pTarget PC-relatively (the auipc is at code+12, so +12 more lands
// on the slot right after m_rgCode[6]) and jump to it without linking.
487 m_rgCode[3] = 0x00000f97; // auipc t6, 0
488 m_rgCode[4] = 0x00cfbf83; // ld t6, 12(t6)
489 m_rgCode[5] = 0x000f8067; // jalr x0, 0(t6)
// Layout checks: the target slot must directly follow the code array.
491 _ASSERTE((UINT32*)&m_pTarget == &m_rgCode[6]);
492 _ASSERTE(6 == ARRAY_SIZE(m_rgCode));
494 m_pTarget = GetPreStubEntryPoint();
495 m_pMethodDesc = (TADDR)pMD;
498 #endif // !DACCESS_COMPILE
// Copies the callee-saved register values from a frame's CalleeSavedRegisters
// save area into pRD->pCurrentContext, and points the KNONVOLATILE context
// pointers at the save-area slots so the GC/stack walker can update the
// saved registers in place.
500 void UpdateRegDisplayFromCalleeSavedRegisters(REGDISPLAY * pRD, CalleeSavedRegisters * pCalleeSaved)
502 LIMITED_METHOD_CONTRACT;
503 pRD->pCurrentContext->S1 = pCalleeSaved->s1;
504 pRD->pCurrentContext->S2 = pCalleeSaved->s2;
505 pRD->pCurrentContext->S3 = pCalleeSaved->s3;
506 pRD->pCurrentContext->S4 = pCalleeSaved->s4;
507 pRD->pCurrentContext->S5 = pCalleeSaved->s5;
508 pRD->pCurrentContext->S6 = pCalleeSaved->s6;
509 pRD->pCurrentContext->S7 = pCalleeSaved->s7;
510 pRD->pCurrentContext->S8 = pCalleeSaved->s8;
511 pRD->pCurrentContext->S9 = pCalleeSaved->s9;
512 pRD->pCurrentContext->S10 = pCalleeSaved->s10;
513 pRD->pCurrentContext->S11 = pCalleeSaved->s11;
514 pRD->pCurrentContext->Gp = pCalleeSaved->gp;
515 pRD->pCurrentContext->Tp = pCalleeSaved->tp;
516 pRD->pCurrentContext->Fp = pCalleeSaved->fp;
517 pRD->pCurrentContext->Ra = pCalleeSaved->ra;
// Context pointers reference the save-area slots themselves, not copies.
519 T_KNONVOLATILE_CONTEXT_POINTERS * pContextPointers = pRD->pCurrentContextPointers;
520 pContextPointers->S1 = (PDWORD64)&pCalleeSaved->s1;
521 pContextPointers->S2 = (PDWORD64)&pCalleeSaved->s2;
522 pContextPointers->S3 = (PDWORD64)&pCalleeSaved->s3;
523 pContextPointers->S4 = (PDWORD64)&pCalleeSaved->s4;
524 pContextPointers->S5 = (PDWORD64)&pCalleeSaved->s5;
525 pContextPointers->S6 = (PDWORD64)&pCalleeSaved->s6;
526 pContextPointers->S7 = (PDWORD64)&pCalleeSaved->s7;
527 pContextPointers->S8 = (PDWORD64)&pCalleeSaved->s8;
528 pContextPointers->S9 = (PDWORD64)&pCalleeSaved->s9;
529 pContextPointers->S10 = (PDWORD64)&pCalleeSaved->s10;
530 pContextPointers->S11 = (PDWORD64)&pCalleeSaved->s11;
531 pContextPointers->Gp = (PDWORD64)&pCalleeSaved->gp;
532 pContextPointers->Tp = (PDWORD64)&pCalleeSaved->tp;
533 pContextPointers->Fp = (PDWORD64)&pCalleeSaved->fp;
534 pContextPointers->Ra = (PDWORD64)&pCalleeSaved->ra;
// Rebuilds the REGDISPLAY from this TransitionFrame: callee-saved registers
// come from the frame's save area, volatile registers are invalidated, and
// PC/SP are taken from the frame's return address and stack pointer.
537 void TransitionFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
539 pRD->IsCallerContextValid = FALSE;
540 pRD->IsCallerSPValid = FALSE; // Don't add usage of this field. This is only temporary.
542 // copy the callee saved regs
543 CalleeSavedRegisters *pCalleeSaved = GetCalleeSavedRegisters();
544 UpdateRegDisplayFromCalleeSavedRegisters(pRD, pCalleeSaved);
// Volatile (argument/scratch) registers are dead across the transition.
546 ClearRegDisplayArgumentAndScratchRegisters(pRD);
548 // copy the control registers
549 //pRD->pCurrentContext->Fp = pCalleeSaved->fp;//not needed for duplicated.
550 //pRD->pCurrentContext->Ra = pCalleeSaved->ra;//not needed for duplicated.
551 pRD->pCurrentContext->Pc = GetReturnAddress();
552 pRD->pCurrentContext->Sp = this->GetSP();
554 // Finally, syncup the regdisplay with the context
555 SyncRegDisplayToCurrentContext(pRD);
557 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK TransitionFrame::UpdateRegDisplay(pc:%p, sp:%p)\n", pRD->ControlPC, pRD->SP));
// Rebuilds the REGDISPLAY from the exception context (m_ctx) captured when
// the fault occurred: the whole context is copied, and the non-volatile
// context pointers are aimed at the fields of m_ctx itself.
560 void FaultingExceptionFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
562 LIMITED_METHOD_DAC_CONTRACT;
564 // Copy the context to regdisplay
565 memcpy(pRD->pCurrentContext, &m_ctx, sizeof(T_CONTEXT));
567 pRD->ControlPC = ::GetIP(&m_ctx);
568 pRD->SP = ::GetSP(&m_ctx);
570 // Update the integer registers in KNONVOLATILE_CONTEXT_POINTERS from
571 // the exception context we have.
572 pRD->pCurrentContextPointers->S1 = (PDWORD64)&m_ctx.S1;
573 pRD->pCurrentContextPointers->S2 = (PDWORD64)&m_ctx.S2;
574 pRD->pCurrentContextPointers->S3 = (PDWORD64)&m_ctx.S3;
575 pRD->pCurrentContextPointers->S4 = (PDWORD64)&m_ctx.S4;
576 pRD->pCurrentContextPointers->S5 = (PDWORD64)&m_ctx.S5;
577 pRD->pCurrentContextPointers->S6 = (PDWORD64)&m_ctx.S6;
578 pRD->pCurrentContextPointers->S7 = (PDWORD64)&m_ctx.S7;
579 pRD->pCurrentContextPointers->S8 = (PDWORD64)&m_ctx.S8;
580 pRD->pCurrentContextPointers->S9 = (PDWORD64)&m_ctx.S9;
581 pRD->pCurrentContextPointers->S10 = (PDWORD64)&m_ctx.S10;
582 pRD->pCurrentContextPointers->S11 = (PDWORD64)&m_ctx.S11;
583 pRD->pCurrentContextPointers->Fp = (PDWORD64)&m_ctx.Fp;
584 pRD->pCurrentContextPointers->Gp = (PDWORD64)&m_ctx.Gp;
585 pRD->pCurrentContextPointers->Tp = (PDWORD64)&m_ctx.Tp;
586 pRD->pCurrentContextPointers->Ra = (PDWORD64)&m_ctx.Ra;
// Volatile registers are not tracked across the fault.
588 ClearRegDisplayArgumentAndScratchRegisters(pRD);
590 pRD->IsCallerContextValid = FALSE;
591 pRD->IsCallerSPValid = FALSE; // Don't add usage of this field. This is only temporary.
593 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK FaultingExceptionFrame::UpdateRegDisplay(pc:%p, sp:%p)\n", pRD->ControlPC, pRD->SP));
// Rebuilds the REGDISPLAY from this InlinedCallFrame (a frame pushed around
// an inlined P/Invoke call site). PC/SP/FP come from the call-site fields;
// all other non-volatile context pointers are nulled because the frame does
// not save them. Bails out early if the frame has no active call.
596 void InlinedCallFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
602 #ifdef PROFILING_SUPPORTED
603 PRECONDITION(CORProfilerStackSnapshotEnabled() || InlinedCallFrame::FrameHasActiveCall(this));
// Nothing to report for an inactive frame; log and leave pRD untouched.
611 if (!InlinedCallFrame::FrameHasActiveCall(this))
613 LOG((LF_CORDB, LL_ERROR, "WARNING: InlinedCallFrame::UpdateRegDisplay called on inactive frame %p\n", this));
617 pRD->IsCallerContextValid = FALSE;
618 pRD->IsCallerSPValid = FALSE;
620 pRD->pCurrentContext->Pc = *(DWORD64 *)&m_pCallerReturnAddress;
621 pRD->pCurrentContext->Sp = *(DWORD64 *)&m_pCallSiteSP;
622 pRD->pCurrentContext->Fp = *(DWORD64 *)&m_pCalleeSavedFP;
// The frame only records FP; the remaining callee-saved registers have no
// known save location here.
624 pRD->pCurrentContextPointers->S1 = NULL;
625 pRD->pCurrentContextPointers->S2 = NULL;
626 pRD->pCurrentContextPointers->S3 = NULL;
627 pRD->pCurrentContextPointers->S4 = NULL;
628 pRD->pCurrentContextPointers->S5 = NULL;
629 pRD->pCurrentContextPointers->S6 = NULL;
630 pRD->pCurrentContextPointers->S7 = NULL;
631 pRD->pCurrentContextPointers->S8 = NULL;
632 pRD->pCurrentContextPointers->S9 = NULL;
633 pRD->pCurrentContextPointers->S10 = NULL;
634 pRD->pCurrentContextPointers->S11 = NULL;
635 pRD->pCurrentContextPointers->Gp = NULL;
636 pRD->pCurrentContextPointers->Tp = NULL;
638 pRD->ControlPC = m_pCallerReturnAddress;
639 pRD->SP = (DWORD64) dac_cast<TADDR>(m_pCallSiteSP);
641 // reset pContext; it's only valid for active (top-most) frame
642 pRD->pContext = NULL;
644 ClearRegDisplayArgumentAndScratchRegisters(pRD);
647 // Update the frame pointer in the current context.
648 pRD->pCurrentContextPointers->Fp = &m_pCalleeSavedFP;
650 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK InlinedCallFrame::UpdateRegDisplay(pc:%p, sp:%p)\n", pRD->ControlPC, pRD->SP));
655 #ifdef FEATURE_HIJACK
// Returns the address of the Pc field inside the saved context (m_Regs),
// i.e. the location that holds the resume/return address for this frame.
656 TADDR ResumableFrame::GetReturnAddressPtr(void)
658 LIMITED_METHOD_DAC_CONTRACT;
659 return dac_cast<TADDR>(m_Regs) + offsetof(T_CONTEXT, Pc);
// Rebuilds the REGDISPLAY from the full saved context (m_Regs). Unlike the
// other frames in this file, a ResumableFrame preserves ALL registers, so
// both the non-volatile and the volatile context pointers are aimed at the
// fields of the saved context.
662 void ResumableFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
673 CopyMemory(pRD->pCurrentContext, m_Regs, sizeof(T_CONTEXT));
675 pRD->ControlPC = m_Regs->Pc;
676 pRD->SP = m_Regs->Sp;
// Non-volatile registers: point at the saved-context fields so updates
// propagate back into m_Regs.
678 pRD->pCurrentContextPointers->S1 = &m_Regs->S1;
679 pRD->pCurrentContextPointers->S2 = &m_Regs->S2;
680 pRD->pCurrentContextPointers->S3 = &m_Regs->S3;
681 pRD->pCurrentContextPointers->S4 = &m_Regs->S4;
682 pRD->pCurrentContextPointers->S5 = &m_Regs->S5;
683 pRD->pCurrentContextPointers->S6 = &m_Regs->S6;
684 pRD->pCurrentContextPointers->S7 = &m_Regs->S7;
685 pRD->pCurrentContextPointers->S8 = &m_Regs->S8;
686 pRD->pCurrentContextPointers->S9 = &m_Regs->S9;
687 pRD->pCurrentContextPointers->S10 = &m_Regs->S10;
688 pRD->pCurrentContextPointers->S11 = &m_Regs->S11;
689 pRD->pCurrentContextPointers->Tp = &m_Regs->Tp;
690 pRD->pCurrentContextPointers->Gp = &m_Regs->Gp;
691 pRD->pCurrentContextPointers->Fp = &m_Regs->Fp;
692 pRD->pCurrentContextPointers->Ra = &m_Regs->Ra;
// Volatile registers are preserved by this frame too.
694 pRD->volatileCurrContextPointers.R0 = &m_Regs->R0;
695 pRD->volatileCurrContextPointers.A0 = &m_Regs->A0;
696 pRD->volatileCurrContextPointers.A1 = &m_Regs->A1;
697 pRD->volatileCurrContextPointers.A2 = &m_Regs->A2;
698 pRD->volatileCurrContextPointers.A3 = &m_Regs->A3;
699 pRD->volatileCurrContextPointers.A4 = &m_Regs->A4;
700 pRD->volatileCurrContextPointers.A5 = &m_Regs->A5;
701 pRD->volatileCurrContextPointers.A6 = &m_Regs->A6;
702 pRD->volatileCurrContextPointers.A7 = &m_Regs->A7;
703 pRD->volatileCurrContextPointers.T0 = &m_Regs->T0;
704 pRD->volatileCurrContextPointers.T1 = &m_Regs->T1;
705 pRD->volatileCurrContextPointers.T2 = &m_Regs->T2;
706 pRD->volatileCurrContextPointers.T3 = &m_Regs->T3;
707 pRD->volatileCurrContextPointers.T4 = &m_Regs->T4;
708 pRD->volatileCurrContextPointers.T5 = &m_Regs->T5;
709 pRD->volatileCurrContextPointers.T6 = &m_Regs->T6;
711 pRD->IsCallerContextValid = FALSE;
712 pRD->IsCallerSPValid = FALSE; // Don't add usage of this field. This is only temporary.
714 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK ResumableFrame::UpdateRegDisplay(pc:%p, sp:%p)\n", pRD->ControlPC, pRD->SP));
// Rebuilds the REGDISPLAY from the HijackArgs saved when a thread was
// hijacked for suspension: PC is the hijacked return address, SP is the
// stack pointer just above the HijackArgs block, and the callee-saved
// registers come from (and point into) m_Args.
719 void HijackFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
721 LIMITED_METHOD_CONTRACT;
723 pRD->IsCallerContextValid = FALSE;
724 pRD->IsCallerSPValid = FALSE;
726 pRD->pCurrentContext->Pc = m_ReturnAddress;
727 size_t s = sizeof(struct HijackArgs);
728 _ASSERTE(s%8 == 0); // HijackArgs contains register values and hence will be a multiple of 8
729 // stack must be multiple of 16. So if s is not multiple of 16 then there must be padding of 8 bytes
731 pRD->pCurrentContext->Sp = PTR_TO_TADDR(m_Args) + s ;
733 pRD->pCurrentContext->S1 = m_Args->S1;
734 pRD->pCurrentContext->S2 = m_Args->S2;
735 pRD->pCurrentContext->S3 = m_Args->S3;
736 pRD->pCurrentContext->S4 = m_Args->S4;
737 pRD->pCurrentContext->S5 = m_Args->S5;
738 pRD->pCurrentContext->S6 = m_Args->S6;
739 pRD->pCurrentContext->S7 = m_Args->S7;
740 pRD->pCurrentContext->S8 = m_Args->S8;
741 pRD->pCurrentContext->S9 = m_Args->S9;
742 pRD->pCurrentContext->S10 = m_Args->S10;
743 pRD->pCurrentContext->S11 = m_Args->S11;
744 pRD->pCurrentContext->Gp = m_Args->Gp;
745 pRD->pCurrentContext->Tp = m_Args->Tp;
746 pRD->pCurrentContext->Fp = m_Args->Fp;
747 pRD->pCurrentContext->Ra = m_Args->Ra;
// Context pointers reference the HijackArgs slots so the saved values can
// be updated in place; Ra stays NULL (it is recovered by unwinding).
749 pRD->pCurrentContextPointers->S1 = &m_Args->S1;
750 pRD->pCurrentContextPointers->S2 = &m_Args->S2;
751 pRD->pCurrentContextPointers->S3 = &m_Args->S3;
752 pRD->pCurrentContextPointers->S4 = &m_Args->S4;
753 pRD->pCurrentContextPointers->S5 = &m_Args->S5;
754 pRD->pCurrentContextPointers->S6 = &m_Args->S6;
755 pRD->pCurrentContextPointers->S7 = &m_Args->S7;
756 pRD->pCurrentContextPointers->S8 = &m_Args->S8;
757 pRD->pCurrentContextPointers->S9 = &m_Args->S9;
758 pRD->pCurrentContextPointers->S10 = &m_Args->S10;
759 pRD->pCurrentContextPointers->S11 = &m_Args->S11;
760 pRD->pCurrentContextPointers->Gp = &m_Args->Gp;
761 pRD->pCurrentContextPointers->Tp = &m_Args->Tp;
762 pRD->pCurrentContextPointers->Fp = &m_Args->Fp;
763 pRD->pCurrentContextPointers->Ra = NULL;
764 SyncRegDisplayToCurrentContext(pRD);
766 LOG((LF_GCROOTS, LL_INFO100000, "STACKWALK HijackFrame::UpdateRegDisplay(pc:%p, sp:%p)\n", pRD->ControlPC, pRD->SP));
768 #endif // FEATURE_HIJACK
770 #ifdef FEATURE_COMINTEROP
// COM-callable stub emission is not yet implemented for RISCV64;
// reaching this helper asserts unconditionally.
772 void emitCOMStubCall (ComCallMethodDesc *pCOMMethodRX, ComCallMethodDesc *pCOMMethodRW, PCODE target)
774 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
776 #endif // FEATURE_COMINTEROP
780 _ASSERTE(!"RISCV64:NYI");
783 #if !defined(DACCESS_COMPILE)
784 EXTERN_C void JIT_UpdateWriteBarrierState(bool skipEphemeralCheck, size_t writeableOffset);
786 extern "C" void STDCALL JIT_PatchedCodeStart();
787 extern "C" void STDCALL JIT_PatchedCodeLast();
// Patches the copied JIT write-barrier code region so it matches the current
// GC configuration (server vs. workstation heap).
// NOTE(review): the skipEphemeralCheck parameter is not used by the visible
// body — JIT_UpdateWriteBarrierState receives GCHeapUtilities::IsServerHeap()
// directly; confirm this is intentional.
789 static void UpdateWriteBarrierState(bool skipEphemeralCheck)
791 BYTE *writeBarrierCodeStart = GetWriteBarrierCodeLocation((void*)JIT_PatchedCodeStart);
792 BYTE *writeBarrierCodeStartRW = writeBarrierCodeStart;
793 ExecutableWriterHolderNoLog<BYTE> writeBarrierWriterHolder;
// When the write-barrier copy (W^X) is enabled, map a writable alias of the
// executable patched-code range and patch through that alias instead.
794 if (IsWriteBarrierCopyEnabled())
796 writeBarrierWriterHolder.AssignExecutableWriterHolder(writeBarrierCodeStart, (BYTE*)JIT_PatchedCodeLast - (BYTE*)JIT_PatchedCodeStart);
797 writeBarrierCodeStartRW = writeBarrierWriterHolder.GetRW();
// Second argument is the RW-alias delta the barrier patcher must apply when
// writing into the barrier code.
799 JIT_UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap(), writeBarrierCodeStartRW - writeBarrierCodeStart);
// One-time JIT helper-table initialization: installs the fast non-logging
// allocation helpers when no diagnostic feature needs to observe allocations,
// then syncs the write-barrier code with the current GC mode.
802 void InitJITHelpers1()
804 STANDARD_VM_CONTRACT;
806 _ASSERTE(g_SystemInfo.dwNumberOfProcessors != 0);
808 // Allocation helpers, faster but non-logging
// Fast paths are only safe when allocation tracking, GC alloc logging and
// GC-heap fault injection are all disabled.
809 if (!((TrackAllocationsEnabled()) ||
810 (LoggingOn(LF_GCALLOC, LL_INFO10))
812 || (g_pConfig->ShouldInjectFault(INJECTFAULT_GCHEAP) != 0)
// The portable MP fast helpers assume per-thread allocation contexts.
816 if (GCHeapUtilities::UseThreadAllocationContexts())
818 SetJitHelperFunction(CORINFO_HELP_NEWSFAST, JIT_NewS_MP_FastPortable);
819 SetJitHelperFunction(CORINFO_HELP_NEWSFAST_ALIGN8, JIT_NewS_MP_FastPortable);
820 SetJitHelperFunction(CORINFO_HELP_NEWARR_1_VC, JIT_NewArr1VC_MP_FastPortable);
821 SetJitHelperFunction(CORINFO_HELP_NEWARR_1_OBJ, JIT_NewArr1OBJ_MP_FastPortable);
823 ECall::DynamicallyAssignFCallImpl(GetEEFuncEntryPoint(AllocateString_MP_FastPortable), ECall::FastAllocateString);
827 UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap());
// No-op replacement used when write-barrier patching is unavailable in this
// build configuration (presumably the alternate branch of the
// !defined(DACCESS_COMPILE) region closed just below — confirm).
831 void UpdateWriteBarrierState(bool) {}
832 #endif // !defined(DACCESS_COMPILE)
// Recovers the CONTEXT pointer saved by the thread-redirection stub, located
// at a fixed offset from the dispatcher context's establisher frame.
834 PTR_CONTEXT GetCONTEXTFromRedirectedStubStackFrame(T_DISPATCHER_CONTEXT * pDispatcherContext)
836 LIMITED_METHOD_DAC_CONTRACT;
838 DWORD64 stackSlot = pDispatcherContext->EstablisherFrame + REDIRECTSTUB_SP_OFFSET_CONTEXT;
839 PTR_PTR_CONTEXT ppContext = dac_cast<PTR_PTR_CONTEXT>((TADDR)stackSlot);
// Overload taking a raw CONTEXT: the saved CONTEXT-pointer slot is addressed
// relative to the stub frame's SP instead of the establisher frame.
843 PTR_CONTEXT GetCONTEXTFromRedirectedStubStackFrame(T_CONTEXT * pContext)
845 LIMITED_METHOD_DAC_CONTRACT;
847 DWORD64 stackSlot = pContext->Sp + REDIRECTSTUB_SP_OFFSET_CONTEXT;
848 PTR_PTR_CONTEXT ppContext = dac_cast<PTR_PTR_CONTEXT>((TADDR)stackSlot);
852 #if !defined(DACCESS_COMPILE)
// Not yet implemented for RISCV64: asserts and returns NULL.
853 FaultingExceptionFrame *GetFrameFromRedirectedStubStackFrame (DISPATCHER_CONTEXT *pDispatcherContext)
855 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
856 LIMITED_METHOD_CONTRACT;
858 return (FaultingExceptionFrame*)NULL;
// If an access violation happened inside a virtual-stub-dispatch (VSD) stub,
// rewinds the faulting context so the exception appears to originate at the
// call site in the caller rather than inside the stub.
863 AdjustContextForVirtualStub(
864 EXCEPTION_RECORD *pExceptionRecord,
867 LIMITED_METHOD_CONTRACT;
869 Thread * pThread = GetThreadNULLOk();
871 // We may not have a managed thread object. Example is an AV on the helper thread.
872 // (perhaps during StubManager::IsStub)
878 PCODE f_IP = GetIP(pContext);
880 StubCodeBlockKind sk = RangeSectionStubManager::GetStubKind(f_IP);
882 if (sk == STUB_CODE_BLOCK_VSD_DISPATCH_STUB)
// In a dispatch stub the faulting PC is expected 4 bytes (one instruction)
// past the stub's first DWORD; anything else is an unexpected fault.
884 if (*PTR_DWORD(f_IP - 4) != DISPATCH_STUB_FIRST_DWORD)
886 _ASSERTE(!"AV in DispatchStub at unknown instruction");
891 if (sk == STUB_CODE_BLOCK_VSD_RESOLVE_STUB)
// A resolve stub is expected to fault on its very first instruction.
893 if (*PTR_DWORD(f_IP) != RESOLVE_STUB_FIRST_DWORD)
895 _ASSERTE(!"AV in ResolveStub at unknown instruction");
// Derive the call-site address from the return address register.
904 PCODE callsite = GetAdjustedCallAddress(GetRA(pContext));
906 // Lr must already have been saved before calling so it should not be necessary to restore Lr
908 if (pExceptionRecord != NULL)
910 pExceptionRecord->ExceptionAddress = (PVOID)callsite;
912 SetIP(pContext, callsite);
916 #endif // !DACCESS_COMPILE
// Maps an unmanaged callback code pointer back to its owning UMEntryThunk by
// pattern-matching the four fixed instructions written by
// UMEntryThunkCode::Encode, then reading the stashed secret parameter.
918 UMEntryThunk * UMEntryThunk::Decode(void *pCallback)
920 _ASSERTE(offsetof(UMEntryThunkCode, m_code) == 0);
921 UMEntryThunkCode * pCode = (UMEntryThunkCode*)pCallback;
923 // We may be called with an unmanaged external code pointer instead. So if it doesn't look like one of our
924 // stubs (see UMEntryThunkCode::Encode below) then we'll return NULL. Luckily in these scenarios our
925 // caller will perform a hash lookup on successful return to verify our result in case random unmanaged
926 // code happens to look like ours.
927 if ((pCode->m_code[0] == 0x00000f97) && // auipc t6, 0
928 (pCode->m_code[1] == 0x018fb383) && // ld t2, 24(t6)
929 (pCode->m_code[2] == 0x010fbf83) && // ld t6, 16(t6)
930 (pCode->m_code[3] == 0x000f8067)) // jalr x0, 0(t6)
932 return (UMEntryThunk*)pCode->m_pvSecretParam;
// Writes the UMEntryThunk machine code into this (writable) instance: four
// fixed instructions followed by two data slots (m_pTargetCode at +16,
// m_pvSecretParam at +24). auipc sets t6 to the thunk base so the two loads
// pick up the data slots; execution then jumps to the target with the secret
// parameter in t2.
938 void UMEntryThunkCode::Encode(UMEntryThunkCode *pEntryThunkCodeRX, BYTE* pTargetCode, void* pvSecretParam)
944 // m_pTargetCode data
945 // m_pvSecretParam data
947 m_code[0] = 0x00000f97; // auipc t6, 0
948 m_code[1] = 0x018fb383; // ld t2, 24(t6)
949 m_code[2] = 0x010fbf83; // ld t6, 16(t6)
950 m_code[3] = 0x000f8067; // jalr x0, 0(t6)
952 m_pTargetCode = (TADDR)pTargetCode;
953 m_pvSecretParam = (TADDR)pvSecretParam;
// Flush via the RX alias: the code above was written through the RW mapping.
954 FlushInstructionCache(GetCurrentProcess(),&pEntryThunkCodeRX->m_code,sizeof(m_code));
957 #ifndef DACCESS_COMPILE
// Neutralizes a dead thunk: the target becomes UMEntryThunk::ReportViolation
// and instruction 1 is rewritten to 0x018fb503 (`ld a0, 24(t6)` — same load,
// rd changed from t2 to a0) so the secret parameter is delivered as
// ReportViolation's first argument.
959 void UMEntryThunkCode::Poison()
961 ExecutableWriterHolder<UMEntryThunkCode> thunkWriterHolder(this, sizeof(UMEntryThunkCode));
962 UMEntryThunkCode *pThisRW = thunkWriterHolder.GetRW();
964 pThisRW->m_pTargetCode = (TADDR)UMEntryThunk::ReportViolation;
967 pThisRW->m_code[1] = 0x018fb503;
969 ClrFlushInstructionCache(&m_code,sizeof(m_code));
972 #endif // DACCESS_COMPILE
974 #if !defined(DACCESS_COMPILE)
// Nothing to reset on RISCV64; kept as an empty stub for cross-platform callers.
975 VOID ResetCurrentContext()
977 LIMITED_METHOD_CONTRACT;
// Exception-filter stub: never handles the exception, always continue search.
981 LONG CLRNoCatchHandler(EXCEPTION_POINTERS* pExceptionInfo, PVOID pv)
983 return EXCEPTION_CONTINUE_SEARCH;
// Intentional no-op; exists only to satisfy the gchelpers.h interface.
986 void FlushWriteBarrierInstructionCache()
988 // this wouldn't be called on riscv64, just to comply with gchelpers.h
// Re-syncs the patched write barrier with the current GC heap mode after an
// ephemeral-generation range change.
991 int StompWriteBarrierEphemeral(bool isRuntimeSuspended)
993 UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap());
// Re-syncs the patched write barrier after the GC heap range is resized.
997 int StompWriteBarrierResize(bool isRuntimeSuspended, bool bReqUpperBoundsCheck)
999 UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap());
1003 #ifdef FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
// Switches to the write-watch barrier by re-syncing the patched barrier code.
1004 int SwitchToWriteWatchBarrier(bool isRuntimeSuspended)
1006 UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap());
// Switches back to the non-write-watch barrier by re-syncing the barrier code.
1010 int SwitchToNonWriteWatchBarrier(bool isRuntimeSuspended)
1012 UpdateWriteBarrierState(GCHeapUtilities::IsServerHeap());
1015 #endif // FEATURE_USE_SOFTWARE_WRITE_WATCH_FOR_GC_HEAP
1017 #ifdef DACCESS_COMPILE
// DAC-side thunk-target inspection is not yet implemented for RISCV64.
1018 BOOL GetAnyThunkTarget (T_CONTEXT *pctx, TADDR *pTarget, TADDR *pTargetMethodDesc)
1020 _ASSERTE(!"RISCV64:NYI");
1023 #endif // DACCESS_COMPILE
1025 #ifndef DACCESS_COMPILE
1026 // ----------------------------------------------------------------
1027 // StubLinkerCPU methods
1028 // ----------------------------------------------------------------
// Materializes a 64-bit immediate into `reg`.
// Small immediates use a single addi from the zero register; larger values
// load the top 31 significant bits via lui (+add/sub of the low part), then
// append the remaining bits in batches of up to 11 bits with slli/addi pairs.
1030 void StubLinkerCPU::EmitMovConstant(IntReg reg, UINT64 imm)
1032 // Adaptation of emitLoadImmediate
1034 if (isValidSimm12(imm))
// Single instruction: addi reg, zero, imm.
1036 EmitAddImm(reg, 0 /* zero register */, imm & 0xFFF);
1040 // TODO-RISCV64: maybe optimized via emitDataConst(), check #86790
// Index of the most significant set bit; imm is non-trivial on this path.
1045 BitScanReverse64(&msb, imm);
// Top 31 significant bits, aligned so that bit `msb` lands at bit 30.
1049 high31 = (imm >> (msb - 30)) & 0x7FffFFff;
1053 high31 = imm & 0x7FffFFff;
1056 // Since ADDIW use sign extension for immediate
1057 // we have to adjust higher 19 bit loaded by LUI
1058 // for case when low part is bigger than 0x800.
1059 UINT32 high19 = (high31 + 0x800) >> 12;
1061 EmitLuImm(reg, high19);
// If the low 12 bits form a negative simm12, compensate by subtracting the
// two's complement; otherwise add the low bits directly.
1064 // EmitAddImm does not allow negative immediate values, so use EmitSubImm.
1065 EmitSubImm(reg, reg, ~high31 + 1 & 0xFFF);
1069 EmitAddImm(reg, reg, high31 & 0x7FF);
1072 // And load remaining part by batches of 11 bits size.
1073 INT32 remainingShift = msb - 30;
1075 // shiftAccumulator usage is an optimization allows to exclude `slli addi` iteration
1076 // if immediate bits `low11` for this iteration are zero.
1077 UINT32 shiftAccumulator = 0;
1079 while (remainingShift > 0)
1081 UINT32 shift = remainingShift >= 11 ? 11 : remainingShift % 11;
1082 UINT32 mask = 0x7ff >> (11 - shift);
1083 remainingShift -= shift;
1084 UINT32 low11 = (imm >> remainingShift) & mask;
1085 shiftAccumulator += shift;
// Only flush the pending shift when this batch has non-zero bits to add.
1089 EmitSllImm(reg, reg, shiftAccumulator);
1090 shiftAccumulator = 0;
1092 EmitAddImm(reg, reg, low11);
// Flush any shift amount still pending after the final batch.
1096 if (shiftAccumulator)
1098 EmitSllImm(reg, reg, shiftAccumulator);
// Emits `jalr x0, 0(regTarget)` — an indirect jump with no link (tail jump).
1102 void StubLinkerCPU::EmitJumpRegister(IntReg regTarget)
1104 Emit32(0x00000067 | (regTarget << 15));
1107 // Instruction types as per RISC-V Spec, Chapter 24 RV32/64G Instruction Set Listings
1108 static unsigned ITypeInstr(unsigned opcode, unsigned funct3, unsigned rd, unsigned rs1, int imm12)
1110 _ASSERTE(!(opcode >> 7));
1111 _ASSERTE(!(funct3 >> 3));
1112 _ASSERTE(!(rd >> 5));
1113 _ASSERTE(!(rs1 >> 5));
1114 _ASSERTE(StubLinkerCPU::isValidSimm12(imm12));
1115 return opcode | (rd << 7) | (funct3 << 12) | (rs1 << 15) | (imm12 << 20);
1118 static unsigned STypeInstr(unsigned opcode, unsigned funct3, unsigned rs1, unsigned rs2, int imm12)
1120 _ASSERTE(!(opcode >> 7));
1121 _ASSERTE(!(funct3 >> 3));
1122 _ASSERTE(!(rs1 >> 5));
1123 _ASSERTE(!(rs2 >> 5));
1124 _ASSERTE(StubLinkerCPU::isValidSimm12(imm12));
1125 int immLo5 = imm12 & 0x1f;
1126 int immHi7 = (imm12 >> 5) & 0x7f;
1127 return opcode | (immLo5 << 7) | (funct3 << 12) | (rs1 << 15) | (rs2 << 20) | (immHi7 << 25);
1130 static unsigned RTypeInstr(unsigned opcode, unsigned funct3, unsigned funct7, unsigned rd, unsigned rs1, unsigned rs2)
1132 _ASSERTE(!(opcode >> 7));
1133 _ASSERTE(!(funct3 >> 3));
1134 _ASSERTE(!(funct7 >> 7));
1135 _ASSERTE(!(rd >> 5));
1136 _ASSERTE(!(rs1 >> 5));
1137 _ASSERTE(!(rs2 >> 5));
1138 return opcode | (rd << 7) | (funct3 << 12) | (rs1 << 15) | (rs2 << 20) | (funct7 << 25);
// Emits `ld dest, offset(srcAddr)` — 64-bit integer load.
1141 void StubLinkerCPU::EmitLoad(IntReg dest, IntReg srcAddr, int offset)
1143 Emit32(ITypeInstr(0x3, 0x3, dest, srcAddr, offset)); // ld
// Emits `fld dest, offset(srcAddr)` — double-precision FP load.
1145 void StubLinkerCPU::EmitLoad(FloatReg dest, IntReg srcAddr, int offset)
1147 Emit32(ITypeInstr(0x7, 0x3, dest, srcAddr, offset)); // fld
// Emits `sd src, offset(destAddr)` — 64-bit integer store.
1150 void StubLinkerCPU:: EmitStore(IntReg src, IntReg destAddr, int offset)
1152 Emit32(STypeInstr(0x23, 0x3, destAddr, src, offset)); // sd
// Emits `fsd src, offset(destAddr)` — double-precision FP store.
1154 void StubLinkerCPU::EmitStore(FloatReg src, IntReg destAddr, int offset)
1156 Emit32(STypeInstr(0x27, 0x3, destAddr, src, offset)); // fsd
// Register-to-register move via `addi Xd, Xm, 0`.
1159 void StubLinkerCPU::EmitMovReg(IntReg Xd, IntReg Xm)
1161 EmitAddImm(Xd, Xm, 0);
// Double-register move: `fsgnj.d dest, source, source` (the fmv.d idiom).
1163 void StubLinkerCPU::EmitMovReg(FloatReg dest, FloatReg source)
1165 Emit32(RTypeInstr(0x53, 0, 0x11, dest, source, source)); // fsgnj.d
// Emits `Xd = Xn - value` as an addi of the two's complement of `value`;
// `value` must not exceed 0x800 so -value fits a signed 12-bit immediate.
1168 void StubLinkerCPU::EmitSubImm(IntReg Xd, IntReg Xn, unsigned int value)
1170 _ASSERTE(value <= 0x800);
1171 EmitAddImm(Xd, Xn, ~value + 0x1);
// Emits `addi Xd, Xn, value`.
1173 void StubLinkerCPU::EmitAddImm(IntReg Xd, IntReg Xn, unsigned int value)
1175 Emit32(ITypeInstr(0x13, 0, Xd, Xn, value)); // addi
// Emits `slli Xd, Xn, value`; the RV64 shift amount is at most 6 bits (0-63).
1177 void StubLinkerCPU::EmitSllImm(IntReg Xd, IntReg Xn, unsigned int value)
1179 _ASSERTE(!(value >> 6));
1180 Emit32(ITypeInstr(0x13, 0x1, Xd, Xn, value)); // slli
// Emits `lui Xd, value` — loads the 20-bit immediate into bits [31:12].
1182 void StubLinkerCPU::EmitLuImm(IntReg Xd, unsigned int value)
1184 _ASSERTE(value <= 0xFFFFF);
1185 Emit32((DWORD)(0x00000037 | (value << 12) | (Xd << 7))); // lui Xd, value
// One-time StubLinker setup: placement-constructs the shared
// BranchInstructionFormat (used by EmitCallLabel) into preallocated storage.
1188 void StubLinkerCPU::Init()
1190 new (gBranchIF) BranchInstructionFormat();
1193 // Emits code to adjust arguments for static delegate target.
// Emits a shuffle thunk for invoking a static delegate target: re-arranges
// the instance-delegate argument layout into the static target's layout
// (register moves, stack loads/stores) and then tail-jumps to the real
// target loaded from the delegate's MethodPtrAux field.
1194 VOID StubLinkerCPU::EmitShuffleThunk(ShuffleEntry *pShuffleEntryArray)
1196 static const int argRegBase = 10; // first argument register: a0, fa0
1197 static const IntReg t6 = 31, t5 = 30, a0 = argRegBase + 0;
1198 // On entry a0 holds the delegate instance. Look up the real target address stored in the MethodPtrAux
1199 // field and saved in t6. Tailcall to the target method after re-arranging the arguments
1200 EmitLoad(t6, a0, DelegateObject::GetOffsetOfMethodPtrAux());
1201 // load the indirection cell into t5 used by ResolveWorkerAsmStub
1202 EmitAddImm(t5, a0, DelegateObject::GetOffsetOfMethodPtrAux());
// NOTE(review): only delay_index[0] is explicitly -1; the remaining seven
// elements are zero-initialized by this aggregate initializer — confirm that
// is intended.
1204 int delay_index[8] = {-1};
1205 bool is_store = false;
1208 for (ShuffleEntry* pEntry = pShuffleEntryArray; pEntry->srcofs != ShuffleEntry::SENTINEL; pEntry++, i++)
1210 if (pEntry->srcofs & ShuffleEntry::REGMASK)
1212 // Source in register, destination in register
1214 // Both the srcofs and dstofs must be of the same kind of registers - float or general purpose.
1215 // If source is present in register then destination may be a stack-slot.
1216 _ASSERTE(((pEntry->dstofs & ShuffleEntry::FPREGMASK) == (pEntry->srcofs & ShuffleEntry::FPREGMASK)) || !(pEntry->dstofs & (ShuffleEntry::FPREGMASK | ShuffleEntry::REGMASK)));
1217 _ASSERTE((pEntry->dstofs & ShuffleEntry::OFSREGMASK) <= 8);//should amend for offset!
1218 _ASSERTE((pEntry->srcofs & ShuffleEntry::OFSREGMASK) <= 8);
// Floating-point shuffles are handled as a run of consecutive FP entries;
// ft0 serves as a scratch register to break source/destination overlap.
1220 if (pEntry->srcofs & ShuffleEntry::FPREGMASK)
1223 while (pEntry[j].srcofs & ShuffleEntry::FPREGMASK)
1227 assert((pEntry->dstofs - pEntry->srcofs) == index);
1230 int tmp_reg = 0; // ft0.
1231 ShuffleEntry* tmp_entry = pShuffleEntryArray + delay_index[0];
1234 EmitLoad(FloatReg(tmp_reg), RegSp, tmp_entry->srcofs * sizeof(void*));
1241 tmp_entry = pEntry + j;
1243 while (pEntry[j].srcofs & ShuffleEntry::FPREGMASK)
1245 FloatReg src = (pEntry[j].srcofs & ShuffleEntry::OFSREGMASK) + argRegBase;
1246 if (pEntry[j].dstofs & ShuffleEntry::FPREGMASK) {
1247 FloatReg dst = (pEntry[j].dstofs & ShuffleEntry::OFSREGMASK) + argRegBase;
1248 EmitMovReg(dst, src);
1252 EmitStore(src, RegSp, pEntry[j].dstofs * sizeof(void*));
1257 assert(tmp_reg <= 7);
1261 EmitMovReg(FloatReg(index + argRegBase), FloatReg(tmp_reg));
// General-purpose register-to-register move.
1269 assert(pEntry->dstofs & ShuffleEntry::REGMASK);
1270 IntReg dst = (pEntry->dstofs & ShuffleEntry::OFSREGMASK) + argRegBase;
1271 IntReg src = (pEntry->srcofs & ShuffleEntry::OFSREGMASK) + argRegBase;
1273 EmitMovReg(dst, src);
1276 else if (pEntry->dstofs & ShuffleEntry::REGMASK)
1278 // source must be on the stack
1279 _ASSERTE(!(pEntry->srcofs & ShuffleEntry::REGMASK));
1281 int dstReg = (pEntry->dstofs & ShuffleEntry::OFSREGMASK) + argRegBase;
1282 int srcOfs = (pEntry->srcofs & ShuffleEntry::OFSMASK) * sizeof(void*);
1283 if (pEntry->dstofs & ShuffleEntry::FPREGMASK)
// Stack -> FP-register loads may be deferred (recorded in delay_index) so
// they do not clobber registers still pending as shuffle sources.
1287 delay_index[index++] = i;
1290 EmitLoad(FloatReg(dstReg), RegSp, srcOfs);
1294 EmitLoad(IntReg(dstReg), RegSp, srcOfs);
// Stack-to-stack move, staged through scratch register t4.
1299 // source & dest must be on the stack
1300 _ASSERTE(!(pEntry->srcofs & ShuffleEntry::REGMASK));
1301 _ASSERTE(!(pEntry->dstofs & ShuffleEntry::REGMASK));
1304 EmitLoad(t4, RegSp, pEntry->srcofs * sizeof(void*));
1305 EmitStore(t4, RegSp, pEntry->dstofs * sizeof(void*));
1308 // Tailcall to target
1310 EmitJumpRegister(t6);
1313 // Emits code to adjust arguments for static delegate target.
// Emits an instantiating-method stub: shuffles the visible integer argument
// registers, injects the hidden instantiation argument (extraArg, or the
// MethodTable / adjusted `this` for unboxing stubs), then tail-calls the
// shared target method.
1314 VOID StubLinkerCPU::EmitComputedInstantiatingMethodStub(MethodDesc* pSharedMD, struct ShuffleEntry *pShuffleEntryArray, void* extraArg)
1316 STANDARD_VM_CONTRACT;
// Only general-purpose register-to-register shuffles are supported here.
1318 for (ShuffleEntry* pEntry = pShuffleEntryArray; pEntry->srcofs != ShuffleEntry::SENTINEL; pEntry++)
1320 _ASSERTE(pEntry->dstofs & ShuffleEntry::REGMASK);
1321 _ASSERTE(pEntry->srcofs & ShuffleEntry::REGMASK);
1322 _ASSERTE(!(pEntry->dstofs & ShuffleEntry::FPREGMASK));
1323 _ASSERTE(!(pEntry->srcofs & ShuffleEntry::FPREGMASK));
1324 _ASSERTE(pEntry->dstofs != ShuffleEntry::HELPERREG);
1325 _ASSERTE(pEntry->srcofs != ShuffleEntry::HELPERREG);
1327 EmitMovReg(IntReg((pEntry->dstofs & ShuffleEntry::OFSREGMASK) + 10), IntReg((pEntry->srcofs & ShuffleEntry::OFSREGMASK) + 10));
1330 MetaSig msig(pSharedMD);
1331 ArgIterator argit(&msig);
// If the callee takes a hidden instantiation parameter, fill its register.
1333 if (argit.HasParamType())
1335 ArgLocDesc sInstArgLoc;
1336 argit.GetParamTypeLoc(&sInstArgLoc);
1337 int regHidden = sInstArgLoc.m_idxGenReg;
1338 _ASSERTE(regHidden != -1);
1339 regHidden += 10;//NOTE: RISCV64 should start at a0=10;
1341 if (extraArg == NULL)
1343 if (pSharedMD->RequiresInstMethodTableArg())
1345 // Unboxing stub case
1346 // Fill param arg with methodtable of this pointer
1347 // ld regHidden, a0, 0
1348 EmitLoad(IntReg(regHidden), IntReg(10));
1353 EmitMovConstant(IntReg(regHidden), (UINT64)extraArg);
1357 if (extraArg == NULL)
1359 // Unboxing stub case
1360 // Address of the value type is address of the boxed instance plus sizeof(MethodDesc*).
1361 // addi a0, a0, sizeof(MethodDesc*)
1362 EmitAddImm(IntReg(10), IntReg(10), sizeof(MethodDesc*));
1365 // Tail call the real target.
1366 EmitCallManagedMethod(pSharedMD, TRUE /* tail call */);
1367 SetTargetMethod(pSharedMD);
// Emits a call or jump to `target` through the shared 64-bit
// BranchInstructionFormat; the variation code composes BIF_VAR_CALL (emit a
// linked call) with BIF_VAR_INDIRECT (target is a cell holding the address).
// NOTE(review): the `if` conditions guarding the two variation-code updates
// are elided from this listing (presumably keyed off fTailCall/fIndirect) —
// confirm against the full file.
1370 void StubLinkerCPU::EmitCallLabel(CodeLabel *target, BOOL fTailCall, BOOL fIndirect)
1372 BranchInstructionFormat::VariationCodes variationCode = BranchInstructionFormat::VariationCodes::BIF_VAR_JUMP;
1374 variationCode = static_cast<BranchInstructionFormat::VariationCodes>(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_CALL);
1376 variationCode = static_cast<BranchInstructionFormat::VariationCodes>(variationCode | BranchInstructionFormat::VariationCodes::BIF_VAR_INDIRECT);
1378 EmitLabelRef(target, reinterpret_cast<BranchInstructionFormat&>(gBranchIF), (UINT)variationCode);
// Emits a (tail-)call to a managed method: directly when the method has a
// stable entry point, otherwise indirectly through its slot address.
1381 void StubLinkerCPU::EmitCallManagedMethod(MethodDesc *pMD, BOOL fTailCall)
1383 // Use direct call if possible.
1384 if (pMD->HasStableEntryPoint())
1386 EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetStableEntryPoint()), fTailCall, FALSE);
1390 EmitCallLabel(NewExternalCodeLabel((LPVOID)pMD->GetAddrOfSlot()), fTailCall, TRUE);
1395 #ifdef FEATURE_READYTORUN
1398 // Allocation of dynamic helpers
1401 #define DYNAMIC_HELPER_ALIGNMENT sizeof(TADDR)
// Dynamic helper emission is not implemented on RISCV64: both emit macros
// assert unconditionally.
1403 #define BEGIN_DYNAMIC_HELPER_EMIT(size) \
1404 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
1405 #define END_DYNAMIC_HELPER_EMIT() \
1406 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
1408 // Uses x8 as scratch register to store address of data label
1409 // After load x8 is increment to point to next data
1410 // only accepts positive offsets
// Not yet implemented on RISCV64 — asserts unconditionally.
1411 static void LoadRegPair(BYTE* p, int reg1, int reg2, UINT32 offset)
1413 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1416 PCODE DynamicHelpers::CreateHelper(LoaderAllocator * pAllocator, TADDR arg, PCODE target)
1418 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
1422 // Caller must ensure sufficient byte are allocated including padding (if applicable)
// Not yet implemented on RISCV64 — asserts unconditionally.
1423 void DynamicHelpers::EmitHelperWithArg(BYTE*& p, size_t rxOffset, LoaderAllocator * pAllocator, TADDR arg, PCODE target)
1425 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1428 PCODE DynamicHelpers::CreateHelperWithArg(LoaderAllocator * pAllocator, TADDR arg, PCODE target)
1430 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1434 PCODE DynamicHelpers::CreateHelper(LoaderAllocator * pAllocator, TADDR arg, TADDR arg2, PCODE target)
1436 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1440 PCODE DynamicHelpers::CreateHelperArgMove(LoaderAllocator * pAllocator, TADDR arg, PCODE target)
1442 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1446 PCODE DynamicHelpers::CreateReturn(LoaderAllocator * pAllocator)
1448 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1452 PCODE DynamicHelpers::CreateReturnConst(LoaderAllocator * pAllocator, TADDR arg)
1454 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1458 PCODE DynamicHelpers::CreateReturnIndirConst(LoaderAllocator * pAllocator, TADDR arg, INT8 offset)
1460 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1464 PCODE DynamicHelpers::CreateHelperWithTwoArgs(LoaderAllocator * pAllocator, TADDR arg, PCODE target)
1466 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1470 PCODE DynamicHelpers::CreateHelperWithTwoArgs(LoaderAllocator * pAllocator, TADDR arg, TADDR arg2, PCODE target)
1472 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
// Not yet implemented on RISCV64 — asserts unconditionally.
1476 PCODE DynamicHelpers::CreateDictionaryLookupHelper(LoaderAllocator * pAllocator, CORINFO_RUNTIME_LOOKUP * pLookup, DWORD dictionaryIndexAndSlot, Module * pModule)
1478 _ASSERTE(!"RISCV64: not implementation on riscv64!!!");
1481 #endif // FEATURE_READYTORUN
1484 #endif // #ifndef DACCESS_COMPILE