From af2e9f47a6be6ac9acb435c63f5c57823b3952f2 Mon Sep 17 00:00:00 2001 From: Jan Vorlicek Date: Thu, 16 Apr 2020 10:53:48 +0200 Subject: [PATCH] Fix null reference handling in VSD stubs on amd64 When null "this" pointer is passed to the VSD dispatch and resolve stubs on amd64, it is not converted to NullReferenceException and it generates AccessViolationException instead, tearing down the process. This happens due to the fact that AdjustContextForVirtualStub always returns FALSE on amd64. It behaves correctly on ARM, x86 and ARM64. This issue has been present at least since .NET Framework 4.5. This change fixes it by implementing the AdjustContextForVirtualStub for amd64 too. I've also found that we were doing this adjustment for ARM and x86 only at some places while ARM64 needs it too. So I've made it unconditional for all architectures at all places. --- src/coreclr/src/vm/amd64/excepamd64.cpp | 48 +++++++++++++++++++++++-- src/coreclr/src/vm/amd64/virtualcallstubcpu.hpp | 30 ++++++++-------- 2 files changed, 62 insertions(+), 16 deletions(-) diff --git a/src/coreclr/src/vm/amd64/excepamd64.cpp b/src/coreclr/src/vm/amd64/excepamd64.cpp index 4fc7282..bfe2ca6 100644 --- a/src/coreclr/src/vm/amd64/excepamd64.cpp +++ b/src/coreclr/src/vm/amd64/excepamd64.cpp @@ -26,6 +26,7 @@ #include "asmconstants.h" #include "exceptionhandling.h" +#include "virtualcallstub.h" @@ -573,9 +574,52 @@ AdjustContextForVirtualStub( { LIMITED_METHOD_CONTRACT; - // Nothing to adjust + Thread * pThread = GetThread(); - return FALSE; + // We may not have a managed thread object. Example is an AV on the helper thread. 
+    // (perhaps during StubManager::IsStub)
+    if (pThread == NULL)
+    {
+        return FALSE;
+    }
+
+    PCODE f_IP = GetIP(pContext);
+
+    VirtualCallStubManager::StubKind sk;
+    VirtualCallStubManager::FindStubManager(f_IP, &sk);
+
+    if (sk == VirtualCallStubManager::SK_DISPATCH)
+    {
+        if ((*PTR_DWORD(f_IP) & 0xffffff) != X64_INSTR_CMP_IND_THIS_REG_RAX) // cmp [THIS_REG], rax
+        {
+            _ASSERTE(!"AV in DispatchStub at unknown instruction");
+            return FALSE;
+        }
+    }
+    else
+    if (sk == VirtualCallStubManager::SK_RESOLVE)
+    {
+        if ((*PTR_DWORD(f_IP) & 0xffffff) != X64_INSTR_MOV_RAX_IND_THIS_REG) // mov rax, [THIS_REG]
+        {
+            _ASSERTE(!"AV in ResolveStub at unknown instruction");
+            return FALSE;
+        }
+        SetSP(pContext, dac_cast<PCODE>(dac_cast<TADDR>(GetSP(pContext)) + sizeof(void*))); // rollback push rdx
+    }
+    else
+    {
+        return FALSE;
+    }
+
+    PCODE callsite = *dac_cast<PTR_PCODE>(GetSP(pContext));
+    if (pExceptionRecord != NULL)
+    {
+        pExceptionRecord->ExceptionAddress = (PVOID)callsite;
+    }
+    SetIP(pContext, callsite);
+    SetSP(pContext, dac_cast<PCODE>(dac_cast<TADDR>(GetSP(pContext)) + sizeof(void*))); // Move SP to where it was at the call site
+
+    return TRUE;
 }
 
 #endif
diff --git a/src/coreclr/src/vm/amd64/virtualcallstubcpu.hpp b/src/coreclr/src/vm/amd64/virtualcallstubcpu.hpp
index 510f605..78d8f77 100644
--- a/src/coreclr/src/vm/amd64/virtualcallstubcpu.hpp
+++ b/src/coreclr/src/vm/amd64/virtualcallstubcpu.hpp
@@ -23,6 +23,16 @@
 #pragma pack(push, 1) // since we are placing code, we want byte packing of the structs
 
+// Four bytes starting at the instruction in the stub where an access
+// violation is converted to a NullReferenceException at the caller site.
+#ifdef UNIX_AMD64_ABI +#define X64_INSTR_CMP_IND_THIS_REG_RAX 0x013948 +#define X64_INSTR_MOV_RAX_IND_THIS_REG 0x018b48 +#else // UNIX_AMD64_ABI +#define X64_INSTR_CMP_IND_THIS_REG_RAX 0x073948 +#define X64_INSTR_MOV_RAX_IND_THIS_REG 0x078b48 +#endif // UNIX_AMD64_ABI + #define USES_LOOKUP_STUBS 1 /********************************************************************************************* @@ -594,13 +604,9 @@ void DispatchHolder::InitializeStatic() dispatchInit._entryPoint [0] = 0x48; dispatchInit._entryPoint [1] = 0xB8; dispatchInit._expectedMT = 0xcccccccccccccccc; - dispatchInit.part1 [0] = 0x48; - dispatchInit.part1 [1] = 0x39; -#ifdef UNIX_AMD64_ABI - dispatchInit.part1 [2] = 0x07; // RDI -#else - dispatchInit.part1 [2] = 0x01; // RCX -#endif + dispatchInit.part1 [0] = X64_INSTR_CMP_IND_THIS_REG_RAX & 0xff; + dispatchInit.part1 [1] = (X64_INSTR_CMP_IND_THIS_REG_RAX >> 8) & 0xff; + dispatchInit.part1 [2] = (X64_INSTR_CMP_IND_THIS_REG_RAX >> 16) & 0xff; dispatchInit.nopOp = 0x90; // Short dispatch stub initialization @@ -686,13 +692,9 @@ void ResolveHolder::InitializeStatic() resolveInit._resolveEntryPoint [1] = 0x49; resolveInit._resolveEntryPoint [2] = 0xBA; resolveInit._cacheAddress = 0xcccccccccccccccc; - resolveInit.part1 [ 0] = 0x48; - resolveInit.part1 [ 1] = 0x8B; -#ifdef UNIX_AMD64_ABI - resolveInit.part1 [ 2] = 0x07; // RDI -#else - resolveInit.part1 [ 2] = 0x01; // RCX -#endif + resolveInit.part1 [ 0] = X64_INSTR_MOV_RAX_IND_THIS_REG & 0xff; + resolveInit.part1 [ 1] = (X64_INSTR_MOV_RAX_IND_THIS_REG >> 8) & 0xff; + resolveInit.part1 [ 2] = (X64_INSTR_MOV_RAX_IND_THIS_REG >> 16) & 0xff; resolveInit.part1 [ 3] = 0x48; resolveInit.part1 [ 4] = 0x8B; resolveInit.part1 [ 5] = 0xD0; -- 2.7.4