// fgMorphArgs will have created trees to pass the address in VirtualStubParam.reg.
// All we have to do here is add an indirection to generate the actual call target.
- GenTree* ind;
-
-#ifdef _TARGET_ARM_
- // For ARM, fgMorphTailCall has already made gtCallAddr a GT_IND for virtual stub tail calls.
- // (When we eliminate LEGACY_BACKEND maybe we can eliminate this asymmetry?)
- if (call->IsTailCallViaHelper())
- {
- ind = call->gtCallAddr;
- assert(ind->gtOper == GT_IND);
- }
- else
-#endif // _TARGET_ARM_
- {
- ind = Ind(call->gtCallAddr);
- BlockRange().InsertAfter(call->gtCallAddr, ind);
- call->gtCallAddr = ind;
- }
+ GenTree* ind = Ind(call->gtCallAddr);
+ BlockRange().InsertAfter(call->gtCallAddr, ind);
+ call->gtCallAddr = ind;
ind->gtFlags |= GTF_IND_REQ_ADDR_IN_REG;
if (result == nullptr)
{
- GenTree* indir = Ind(addr);
-
-// On x86 we generate this:
-// call dword ptr [rel32] ; FF 15 ---rel32----
-// So we don't use a register.
-#ifndef _TARGET_X86_
- // on x64 we must materialize the target using specific registers.
- addr->gtRegNum = comp->virtualStubParamInfo->GetReg();
-
-// On ARM we must use a proper address in R12(thunk register) without dereferencing.
-// So for the jump we use the default register.
-// TODO: specifying register probably unnecessary for other platforms, too.
-#if !defined(_TARGET_UNIX_) && !defined(_TARGET_ARM_) && !defined(_TARGET_ARM64_)
- indir->gtRegNum = REG_JUMP_THUNK_PARAM;
-#elif defined(_TARGET_ARM64_)
- // Prevent indir->gtRegNum from colliding with addr->gtRegNum
- indir->gtRegNum = REG_JUMP_THUNK_PARAM;
-
- // Sanity checks
- assert(addr->gtRegNum != indir->gtRegNum); // indir and addr registers must be different
- static_assert_no_msg((RBM_JUMP_THUNK_PARAM & RBM_ARG_REGS) == 0);
- static_assert_no_msg((RBM_JUMP_THUNK_PARAM & RBM_INT_CALLEE_TRASH) != 0);
-
-#elif defined(_TARGET_ARM_)
- // TODO-ARM-Cleanup: This is a temporarey hotfix to fix a regression observed in Linux/ARM.
- if (!comp->IsTargetAbi(CORINFO_CORERT_ABI))
- indir->gtRegNum = REG_JUMP_THUNK_PARAM;
-#endif
- indir->gtFlags |= GTF_IND_REQ_ADDR_IN_REG;
-#endif
- result = indir;
+ result = Ind(addr);
}
}
assert(arg2 != nullptr);
nonStandardArgs.Add(arg2, REG_LNGARG_HI);
}
-#else // !defined(_TARGET_X86_)
+#else // !_TARGET_X86_
// TODO-X86-CQ: Currently RyuJIT/x86 passes args on the stack, so this is not needed.
// If/when we change that, the following code needs to be changed to correctly support the (TBD) managed calling
// convention for x86/SSE.
nonStandardArgs.Add(cns, REG_PINVOKE_COOKIE_PARAM);
}
- else if (call->IsVirtualStub() && (call->gtCallType == CT_INDIRECT) && !call->IsTailCallViaHelper())
+ else if (call->IsVirtualStub())
{
- // indirect VSD stubs need the base of the indirection cell to be
- // passed in addition. At this point that is the value in gtCallAddr.
- // The actual call target will be derived from gtCallAddr in call
- // lowering.
-
- // If it is a VSD call getting dispatched via tail call helper,
- // fgMorphTailCall() would materialize stub addr as an additional
- // parameter added to the original arg list and hence no need to
- // add as a non-standard arg.
-
- GenTree* arg = call->gtCallAddr;
- if (arg->OperIsLocal())
+ if (!call->IsTailCallViaHelper())
{
- arg = gtClone(arg, true);
+ GenTree* stubAddrArg = fgGetStubAddrArg(call);
+ // And push the stub address onto the list of arguments
+ call->gtCallArgs = gtNewListNode(stubAddrArg, call->gtCallArgs);
+
+ numArgs++;
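+ // fgGetStubAddrArg has already fixed the register for the stub address,
+ // so record it as a non-standard arg in that register.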
+ nonStandardArgs.Add(stubAddrArg, stubAddrArg->gtRegNum);
}
else
{
- call->gtCallAddr = fgInsertCommaFormTemp(&arg);
- call->gtFlags |= GTF_ASG;
+ // If it is a VSD call getting dispatched via the tail call helper,
+ // fgMorphTailCall() materializes the stub address as an additional
+ // parameter added to the original arg list, so there is no need to
+ // add it as a non-standard arg here.
}
- noway_assert(arg != nullptr);
-
- // And push the stub address onto the list of arguments
- call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
- numArgs++;
-
- nonStandardArgs.Add(arg, virtualStubParamInfo->GetReg());
}
else
-#endif // defined(_TARGET_X86_)
+#endif // !_TARGET_X86_
if (call->gtCallType == CT_INDIRECT && (call->gtCallCookie != nullptr))
{
assert(!call->IsUnmanaged());
if (call->IsVirtualStub())
{
flags = CORINFO_TAILCALL_STUB_DISPATCH_ARG;
-
+#ifdef LEGACY_BACKEND
GenTree* arg;
if (call->gtCallType == CT_INDIRECT)
{
arg = gtClone(call->gtCallAddr, true);
- noway_assert(arg != NULL);
+ noway_assert(arg != nullptr);
}
else
{
call->gtStubCallStubAddr = NULL;
call->gtCallType = CT_INDIRECT;
}
+ arg->gtRegNum = virtualStubParamInfo->GetReg();
// Add the extra indirection to generate the real target
call->gtCallAddr = gtNewOperNode(GT_IND, TYP_I_IMPL, call->gtCallAddr);
call->gtFlags |= GTF_EXCEPT;
+ call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
+#else // !LEGACY_BACKEND
+ GenTree* stubAddrArg = fgGetStubAddrArg(call);
// And push the stub address onto the list of arguments
- call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
+ call->gtCallArgs = gtNewListNode(stubAddrArg, call->gtCallArgs);
+#endif // !LEGACY_BACKEND
}
else if (call->IsVirtualVtable())
{
GenTree* arg = new (this, GT_NOP) GenTreeOp(GT_NOP, TYP_I_IMPL);
codeGen->genMarkTreeInReg(arg, REG_TAILCALL_ADDR);
#else // !LEGACY_BACKEND
- GenTree* arg = gtNewIconNode(0, TYP_I_IMPL);
+ GenTree* arg = gtNewIconNode(0, TYP_I_IMPL);
#endif // !LEGACY_BACKEND
call->gtCallArgs = gtNewListNode(arg, call->gtCallArgs);
CorInfoHelperTailCallSpecialHandling flags = CorInfoHelperTailCallSpecialHandling(0);
if (call->IsVirtualStub())
{
- GenTree* stubAddrArg;
-
flags = CORINFO_TAILCALL_STUB_DISPATCH_ARG;
- if (call->gtCallType == CT_INDIRECT)
- {
- stubAddrArg = gtClone(call->gtCallAddr, true);
- noway_assert(stubAddrArg != nullptr);
- }
- else
- {
- noway_assert((call->gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT) != 0);
-
- ssize_t addr = ssize_t(call->gtStubCallStubAddr);
- stubAddrArg = gtNewIconHandleNode(addr, GTF_ICON_FTN_ADDR);
- }
-
- // Push the stub address onto the list of arguments
+ GenTree* stubAddrArg = fgGetStubAddrArg(call);
+ // And push the stub address onto the list of arguments
call->gtCallArgs = gtNewListNode(stubAddrArg, call->gtCallArgs);
}
DISPTREE(call);
}
+//------------------------------------------------------------------------
+// fgGetStubAddrArg: Return the virtual stub address for the given call.
+//
+// Arguments:
+// call - a call that needs virtual stub dispatching.
+//
+// Return Value:
+// An address tree with its register requirement set.
+//
+// Notes:
+// The JIT must place the address of the stub used to load the call target,
+// the "stub indirection cell", in a special call argument passed in the
+// virtual stub parameter register.
+//
+GenTree* Compiler::fgGetStubAddrArg(GenTreeCall* call)
+{
+ assert(call->IsVirtualStub());
+ GenTree* stubAddrArg;
+ if (call->gtCallType == CT_INDIRECT)
+ {
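+ // For indirect VSD calls, gtCallAddr currently holds the address of the
+ // stub indirection cell; clone it so it can also be passed as an argument.
+ // The actual call target will be derived from it in call lowering.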
+ stubAddrArg = gtClone(call->gtCallAddr, true);
+ }
+ else
+ {
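+ // For direct VSD calls the stub address is known here; materialize it
+ // as a handle constant.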
+ assert(call->gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT);
+ ssize_t addr = ssize_t(call->gtStubCallStubAddr);
+ stubAddrArg = gtNewIconHandleNode(addr, GTF_ICON_FTN_ADDR);
+ }
+ assert(stubAddrArg != nullptr);
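+ // The stub address must be passed in the virtual stub parameter register,
+ // so fix that register requirement on the node.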
+ stubAddrArg->gtRegNum = virtualStubParamInfo->GetReg();
+ return stubAddrArg;
+}
+
//------------------------------------------------------------------------------
// fgMorphRecursiveFastTailCallIntoLoop : Transform a recursive fast tail call into a loop.
//