`EventPipeConfig` | Configuration for EventPipe. | `STRING` | `INTERNAL` | |
`EventPipeOutputFile` | The full path, including the file name, of the trace file that will be written when COMPlus_EnableEventPipe=1 (see the sketch after this table). | `STRING` | `INTERNAL` | |
`EventPipeRundown` | Enable/disable eventpipe rundown. | `DWORD` | `INTERNAL` | `1` |
-`ExposeExceptionsInCOM` | | `DWORD` | `INTERNAL` | |
-`GenerateStubForHost` | Forces the host hook stub to be built for all unmanaged calls, even when not running hosted. | `DWORD` | `INTERNAL` | `0` |
+`ExposeExceptionsInCOM` | | `DWORD` | `INTERNAL` | |
`InteropLogArguments` | Log all pinned arguments passed to an interop call. | `DWORD` | `EXTERNAL` | `0` |
`InteropValidatePinnedObjects` | After returning from a managed-to-unmanaged interop call, validate GC heap around objects pinned by IL stubs. | `DWORD` | `UNSUPPORTED` | `0` |
`legacyComHierarchyVisibility` | | `DWORD` | `EXTERNAL` | |
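The knobs above can be set as environment variables with the `COMPlus_` prefix (the `EventPipeOutputFile` row already refers to `COMPlus_EnableEventPipe=1`). The sketch below is a minimal, illustrative launcher, not part of the runtime or its tooling, that sets the two EventPipe variables before spawning a .NET process; the output paths are arbitrary examples.

#include <cstdlib>

int main()
{
#ifdef _WIN32
    // Windows CRT: set the variables in this process's environment so that a
    // child .NET process spawned afterwards inherits them.
    _putenv_s("COMPlus_EnableEventPipe", "1");
    _putenv_s("COMPlus_EventPipeOutputFile", "C:\\traces\\app.netperf");
#else
    // POSIX equivalent; the final argument of 1 requests overwrite.
    setenv("COMPlus_EnableEventPipe", "1", 1);
    setenv("COMPlus_EventPipeOutputFile", "/tmp/app.netperf", 1);
#endif
    // ... spawn the target .NET process here so it inherits this environment.
    return 0;
}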
CONFIG_DWORD_INFO_DIRECT_ACCESS(INTERNAL_ExposeExceptionsInCOM, W("ExposeExceptionsInCOM"), "")
RETAIL_CONFIG_DWORD_INFO(EXTERNAL_ComInsteadOfManagedRemoting, W("PreferComInsteadOfManagedRemoting"), 0, "When communicating with a cross app domain CCW, use COM instead of managed remoting.")
-CONFIG_DWORD_INFO(INTERNAL_GenerateStubForHost, W("GenerateStubForHost"), 0, "Forces the host hook stub to be built for all unmanaged calls, even when not running hosted.")
RETAIL_CONFIG_DWORD_INFO_DIRECT_ACCESS(EXTERNAL_legacyComHierarchyVisibility, W("legacyComHierarchyVisibility"), "")
RETAIL_CONFIG_DWORD_INFO_DIRECT_ACCESS(EXTERNAL_legacyComVTableLayout, W("legacyComVTableLayout"), "")
RETAIL_CONFIG_DWORD_INFO_DIRECT_ACCESS(EXTERNAL_newComVTableLayout, W("newComVTableLayout"), "")
include AsmConstants.inc
gfHostConfig equ ?g_fHostConfig@@3KA
-NDirect__IsHostHookEnabled equ ?IsHostHookEnabled@NDirect@@SAHXZ
extern CreateThreadBlockThrow:proc
extern TheUMEntryPrestubWorker:proc
MethodDesc *pStubMD = pClass->m_pForwardStubMD;
_ASSERTE(pStubMD != NULL && pStubMD->IsILStub());
-
-#ifdef MDA_SUPPORTED
+#if defined(MDA_SUPPORTED)
if (MDA_GET_ASSISTANT(PInvokeStackImbalance))
{
pInterceptStub = GenerateStubForMDA(pMD, pStubMD, pCallback, pInterceptStub);
}
#endif // MDA_SUPPORTED
-
-
}
if (pInterceptStub != NULL)
}
GCPROTECT_END();
-#endif // defined(_TARGET_X86_)
+#endif // _TARGET_X86_
return delObj;
}
#ifdef MDA_SUPPORTED
static Stub *GenerateStubForMDA(MethodDesc *pInvokeMD, MethodDesc *pStubMD, LPVOID pNativeTarget, Stub *pInnerStub);
#endif // MDA_SUPPORTED
- static Stub *GenerateStubForHost(MethodDesc *pInvokeMD, MethodDesc *pStubMD, LPVOID pNativeTarget, Stub *pInnerStub);
#endif // _TARGET_X86_
static MethodDesc * __fastcall GetMethodDesc(OBJECTREF obj);
#endif
SetCompilationDomain();
-
-
-#ifdef _DEBUG
- g_pConfig->DisableGenerateStubForHost();
-#endif
}
HRESULT CompilationDomain::AddDependencyEntry(PEAssembly *pFile,
m_slIL.AdjustTargetStackDeltaForExtraParam();
}
-#if defined(_TARGET_X86_)
- // unmanaged CALLI will get an extra arg with the real target address if host hook is enabled
- if (SF_IsCALLIStub(m_dwStubFlags) && NDirect::IsHostHookEnabled())
- {
- pcsMarshal->SetStubTargetArgType(ELEMENT_TYPE_I, false);
- }
-#endif // _TARGET_X86_
-
// Don't touch target signatures from this point on otherwise it messes up the
// cache in ILStubState::GetStubTargetMethodSig.
BinderMethodID getCOMIPMethod;
bool fDoPostCallIPCleanup = true;
- if (!SF_IsNGENedStub(dwStubFlags) && NDirect::IsHostHookEnabled())
- {
- // always use the non-optimized helper if we are hosted
- getCOMIPMethod = METHOD__STUBHELPERS__GET_COM_IP_FROM_RCW;
- }
- else if (SF_IsWinRTStub(dwStubFlags))
+ if (SF_IsWinRTStub(dwStubFlags))
{
// WinRT uses optimized helpers
if (SF_IsWinRTSharedGenericStub(dwStubFlags))
}
#endif // MDA_SUPPORTED
- if (NDirect::IsHostHookEnabled())
- {
- MethodTable *pMT = pMD->GetMethodTable();
- if (pMT->IsProjectedFromWinRT() || pMT->IsWinRTRedirectedInterface(TypeHandle::Interop_ManagedToNative))
- {
- // WinRT NGENed stubs are optimized for the non-hosted scenario and
- // must be rejected if we are hosted.
- return NULL;
- }
- }
-
if (fGcMdaEnabled)
return NULL;
#ifdef FEATURE_COMINTEROP
if (SF_IsWinRTDelegateStub(dwStubFlags))
{
- if (NDirect::IsHostHookEnabled() && pMD->GetMethodTable()->IsProjectedFromWinRT())
- {
- // WinRT NGENed stubs are optimized for the non-hosted scenario and
- // must be rejected if we are hosted.
- return NULL;
- }
-
return pClass->m_pComPlusCallInfo->m_pStubMD.GetValueMaybeNull();
}
else
Stub *pInterceptStub = NULL;
- BOOL fHook = FALSE;
-
- // Host hooks are not supported for Mac CoreCLR.
- if (NDirect::IsHostHookEnabled())
- {
-#ifdef _WIN64
- // we will call CallNeedsHostHook on every invocation for back compat
- fHook = TRUE;
-#else // _WIN64
- fHook = CallNeedsHostHook((size_t)pTarget);
-#endif // _WIN64
-
-#ifdef _DEBUG
- if (g_pConfig->ShouldGenerateStubForHost())
- {
- fHook = TRUE;
- }
-#endif
- }
-
#ifdef _TARGET_X86_
#ifdef MDA_SUPPORTED
if (!IsQCall() && MDA_GET_ASSISTANT(PInvokeStackImbalance))
{
- pInterceptStub = GenerateStubForMDA(pTarget, pInterceptStub, fHook);
+ pInterceptStub = GenerateStubForMDA(pTarget, pInterceptStub);
}
#endif // MDA_SUPPORTED
EnsureWritablePages(pWriteableData);
g_IBCLogger.LogNDirectCodeAccess(this);
- if (pInterceptStub != NULL WIN64_ONLY(|| fHook))
+ if (pInterceptStub != NULL)
{
ndirect.m_pNativeNDirectTarget = pTarget;
inline static ILStubCache* GetILStubCache(NDirectStubParameters* pParams);
-
- static BOOL IsHostHookEnabled();
-
- static Stub *GenerateStubForHost(Module *pModule, CorUnmanagedCallingConvention callConv, WORD wArgSize);
-
private:
NDirect() {LIMITED_METHOD_CONTRACT;}; // prevent "new"'s on this class
HRESULT FindPredefinedILStubMethod(MethodDesc *pTargetMD, DWORD dwStubFlags, MethodDesc **ppRetStubMD);
#endif // FEATURE_COMINTEROP
-EXTERN_C BOOL CallNeedsHostHook(size_t target);
-
-//
-// Inlinable implementation allows compiler to strip all code related to host hook
-//
-inline BOOL NDirect::IsHostHookEnabled()
-{
- LIMITED_METHOD_CONTRACT;
- return FALSE;
-}
-
-inline BOOL CallNeedsHostHook(size_t target)
-{
- LIMITED_METHOD_CONTRACT;
- return FALSE;
-}
-
//
// Limit the length of string fields in IL stub ETW events so that the whole
// IL stub ETW event won't exceed 64KB
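// As a small illustration of that budget (a sketch, not the runtime's code):
// cap each string field at an assumed per-field limit so the serialized event
// stays well under 64KB. The helper name and the 1024-character cap are
// illustrative assumptions, not values taken from the source above.
#include <algorithm>
#include <cstddef>
#include <string>

static const std::size_t kMaxEventStringFieldChars = 1024;  // assumed per-field cap

static std::wstring CapEventStringField(const std::wstring &value)
{
    // Keep only the first kMaxEventStringFieldChars characters of the field.
    return value.substr(0, std::min(value.size(), kMaxEventStringFieldChars));
}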
iGCPollType = GCPOLL_TYPE_DEFAULT;
#ifdef _DEBUG
- fGenerateStubForHost = FALSE;
fShouldInjectFault = 0;
testThreadAbort = 0;
testADUnload = 0;
IfFailRet(ParseTypeList(wszPerfTypes, &pPerfTypesToLog));
iPerfNumAllocsThreshold = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_PerfNumAllocsThreshold);
- iPerfAllocsSizeThreshold = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_PerfAllocsSizeThreshold);
-
- fGenerateStubForHost = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_GenerateStubForHost);
+ iPerfAllocsSizeThreshold = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_PerfAllocsSizeThreshold);
fShouldInjectFault = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_InjectFault);
GCPollType GetGCPollType() { LIMITED_METHOD_CONTRACT; return iGCPollType; }
#ifdef _DEBUG
- BOOL ShouldGenerateStubForHost() const {LIMITED_METHOD_CONTRACT; return fGenerateStubForHost;}
- void DisableGenerateStubForHost() {LIMITED_METHOD_CONTRACT; fGenerateStubForHost = FALSE;}
-
DWORD GetHostTestADUnload() const {LIMITED_METHOD_CONTRACT; return testADUnload;}
DWORD GetHostTestThreadAbort() const {LIMITED_METHOD_CONTRACT; return testThreadAbort;}
GCPollType iGCPollType;
#ifdef _DEBUG
- BOOL fGenerateStubForHost;
DWORD fShouldInjectFault;
DWORD testADUnload;
DWORD testThreadAbort;
#ifdef MDA_SUPPORTED
//-----------------------------------------------------------------------------
-Stub *NDirectMethodDesc::GenerateStubForMDA(LPVOID pNativeTarget, Stub *pInnerStub, BOOL fCalledByStub)
+Stub *NDirectMethodDesc::GenerateStubForMDA(LPVOID pNativeTarget, Stub *pInnerStub)
{
STANDARD_VM_CONTRACT;
if (IsVarArgs())
{
// Re-push the return address as an argument to GetStackSizeForVarArgCall()
- if (fCalledByStub)
- {
- // We will be called by another stub that doesn't know the stack size,
- // so we need to skip a frame to get to the managed caller.
- sl.X86EmitIndexRegLoad(kEAX, kEBP, 0);
- sl.X86EmitIndexPush(kEAX, 4);
- }
- else
- {
- sl.X86EmitIndexPush(kEBP, 4);
- }
+ sl.X86EmitIndexPush(kEBP, 4);
// This will return the number of stack arguments (in DWORDs)
sl.X86EmitCall(sl.NewExternalCodeLabel((LPVOID)GetStackSizeForVarArgCall), 4);
LPVOID FindEntryPoint(HINSTANCE hMod) const;
private:
- Stub* GenerateStubForHost(LPVOID pNativeTarget, Stub *pInnerStub);
#ifdef MDA_SUPPORTED
- Stub* GenerateStubForMDA(LPVOID pNativeTarget, Stub *pInnerStub, BOOL fCalledByStub);
+ Stub* GenerateStubForMDA(LPVOID pNativeTarget, Stub *pInnerStub);
#endif // MDA_SUPPORTED
public:
LPVOID m_pInterceptStub; // used for early-bound IL stub calls
};
- Stub *GenerateStubForHost(LoaderHeap *pHeap, Stub *pInnerStub);
#else // _TARGET_X86_
void InitStackArgumentSize()
{
_ASSERTE(pComInfo->m_pInterceptStub == NULL || pComInfo->m_pInterceptStub == (LPVOID)-1);
_ASSERTE(!pComInfo->HasCopyCtorArgs());
#endif // _TARGET_X86_
- _ASSERTE(!NDirect::IsHostHookEnabled());
LPVOID *lpVtbl = *(LPVOID **)pUnk;
return lpVtbl[pComInfo->m_cachedComSlot];
{
#if defined(_TARGET_X86_)
Stub *pInterceptStub = GetInterceptStub();
-
if (pInterceptStub != NULL)
{
- // There may be multiple chained stubs, i.e. host hook stub calling MDA stack
- // imbalance stub, and the following DecRef will free all of them.
+        // There may be multiple chained stubs; the following DecRef will free all of them.
pInterceptStub->DecRef();
}
#else // _TARGET_X86_
// to the thunk generated for unmanaged code to call back on.
// If this is a delegate representing an unmanaged function pointer,
// this may point to a stub that intercepts calls to the unmng target.
- // It is currently used for pInvokeStackImbalance MDA and host hook.
- // We differentiate between the two by setting the lowest bit if it's
- // an intercept stub.
+    // An example of such an intercept stub is the pInvokeStackImbalance MDA stub.
+    // We differentiate between a thunk and an intercept stub by setting the
+    // lowest bit when it is an intercept stub.
void* m_pUMEntryThunkOrInterceptStub;
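    // A minimal sketch of the lowest-bit tagging described above. The helper
    // names (TagInterceptStub, IsInterceptStub, UntagThunkOrStub) are
    // illustrative assumptions for this document, not the runtime's actual
    // helpers; the scheme just relies on both targets being at least 2-byte
    // aligned so that bit 0 is free to use as the tag.
    static void* TagInterceptStub(void* pStub)
    {
        // Mark the pointer as an intercept stub by setting bit 0.
        return (void*)((size_t)pStub | 1);
    }
    static bool IsInterceptStub(void* pThunkOrStub)
    {
        // Bit 0 set => intercept stub; bit 0 clear => UMEntryThunk.
        return ((size_t)pThunkOrStub & 1) != 0;
    }
    static void* UntagThunkOrStub(void* pThunkOrStub)
    {
        // Strip the tag bit before using the pointer as either kind of target.
        return (void*)((size_t)pThunkOrStub & ~(size_t)1);
    }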
#ifdef FEATURE_COMINTEROP