if (target != nullptr)
{
#ifdef _TARGET_X86_
- if (((call->gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB) && (call->gtCallType == CT_INDIRECT))
+ if (call->IsVirtualStub() && (call->gtCallType == CT_INDIRECT))
{
// On x86, we need to generate a very specific pattern for indirect VSD calls:
//
//    3-byte nop
//    call dword ptr [eax]
//
// Where EAX is also used as an argument to the stub dispatch helper. Make
// sure that the call target address is computed into EAX in this case.
- assert(target->isContainedIndir());
-
- // Disable random NOP emission
- getEmitter()->emitDisableRandomNops();
- GenTreeIndir* indir = target->AsIndir();
- assert(indir->Addr() == indir->Base());
- assert(indir->HasBase());
- assert(!indir->HasIndex());
- assert(indir->Scale() == 1);
- assert(indir->Offset() == 0);
- GenTree* base = indir->Base();
- genConsumeReg(base);
- if (base->gtRegNum != REG_EAX)
+ assert(REG_VIRTUAL_STUB_PARAM == REG_VIRTUAL_STUB_TARGET);
+ assert(target->isContainedIndir());
+ assert(target->OperGet() == GT_IND);
+ GenTree* addr = target->AsIndir()->Addr();
+ assert(!addr->isContained());
+ genConsumeReg(addr);
+ if (addr->gtRegNum != REG_VIRTUAL_STUB_TARGET)
{
- inst_RV_RV(INS_mov, REG_EAX, base->gtRegNum, TYP_I_IMPL);
+ inst_RV_RV(INS_mov, REG_VIRTUAL_STUB_TARGET, addr->gtRegNum, TYP_I_IMPL);
}
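+ // Because REG_VIRTUAL_STUB_PARAM and REG_VIRTUAL_STUB_TARGET are both EAX on
+ // x86 (asserted above), computing the target address into EAX also sets up
+ // the stub dispatch helper's argument. emitIns_Nop(3) below emits a single
+ // 3-byte nop, completing the "3-byte nop; call dword ptr [eax]" pattern.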
getEmitter()->emitIns_Nop(3);
-
getEmitter()->emitIns_Call(emitter::EmitCallType(emitter::EC_INDIR_ARD), methHnd,
INDEBUG_LDISASM_COMMA(sigInfo) nullptr, argSizeForEmitter, retSize
MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(secondRetSize),
gcInfo.gcVarPtrSetCur, gcInfo.gcRegGCrefSetCur, gcInfo.gcRegByrefSetCur,
- ilOffset, REG_EAX, REG_NA, indir->Scale(), indir->Offset());
-
- // Re-enable random NOP emission
- getEmitter()->emitEnableRandomNops();
+ ilOffset, REG_VIRTUAL_STUB_TARGET, REG_NA, 1, 0);
}
else
#endif
-
if (target->isContainedIndir())
{
if (target->AsIndir()->HasBase() && target->AsIndir()->Base()->isContainedIntOrIImmed())
#ifdef _TARGET_X86_
// Fast tail calls aren't currently supported on x86, but if they ever are, the code
// below that handles indirect VSD calls will need to be fixed.
- assert(!call->IsFastTailCall() || ((call->gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_VIRT_STUB));
+ assert(!call->IsFastTailCall() || !call->IsVirtualStub());
#endif // _TARGET_X86_
}
// On x86, we need to generate a very specific pattern for indirect VSD calls:
//
//    3-byte nop
//    call dword ptr [eax]
//
// Where EAX is also used as an argument to the stub dispatch helper. Make
// sure that the call target address is computed into EAX in this case.
- if (((call->gtFlags & GTF_CALL_VIRT_KIND_MASK) == GTF_CALL_VIRT_STUB) && (call->gtCallType == CT_INDIRECT))
+ if (call->IsVirtualStub() && (call->gtCallType == CT_INDIRECT))
{
assert(ctrlExpr->isIndir());
- ctrlExpr->gtGetOp1()->gtLsraInfo.setDstCandidates(l, RBM_EAX);
+ ctrlExpr->gtGetOp1()->gtLsraInfo.setSrcCandidates(l, RBM_VIRTUAL_STUB_TARGET);
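+ // Restricting the address node's candidates to RBM_VIRTUAL_STUB_TARGET steers
+ // the register allocator toward EAX; codegen still emits a mov into EAX if
+ // allocation chose a different register.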
MakeSrcContained(call, ctrlExpr);
}
else
assert(arg2 != nullptr);
nonStandardArgs.Add(arg2, REG_LNGARG_HI);
}
-#else // defined(_TARGET_X86_)
+#else // !defined(_TARGET_X86_)
// TODO-X86-CQ: Currently RyuJIT/x86 passes args on the stack, so this is not needed.
// If/when we change that, the following code needs to be changed to correctly support the (TBD) managed calling
// convention for x86/SSE.
nonStandardArgs.Add(arg, REG_VIRTUAL_STUB_PARAM);
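+ // On these targets the VSD stub expects its extra parameter in a fixed
+ // register (REG_VIRTUAL_STUB_PARAM, e.g. R11 on AMD64), so it is recorded
+ // as a non-standard argument here.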
}
else
-#endif // !defined(_TARGET_X86_)
+#endif // defined(_TARGET_X86_)
if (call->gtCallType == CT_INDIRECT && call->gtCallCookie)
{
assert(!call->IsUnmanaged());
numArgs++;
// x86 passes the cookie on the stack.
+ CLANG_FORMAT_COMMENT_ANCHOR;
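+ // (clang-format would otherwise re-indent the comment above because it
+ // directly precedes an #if block; the anchor keeps it in place.)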
+
#if !defined(_TARGET_X86_)
// put cookie into R11
nonStandardArgs.Add(arg, REG_PINVOKE_COOKIE_PARAM);
-#endif
+#endif // !defined(_TARGET_X86_)
// put destination into R10/EAX
arg = gtClone(call->gtCallAddr, true);