This simplifies reading the code, and improves the debugging experience.
if ((tree->gtFlags & GTF_CALL_NULLCHECK) || ((tree->gtFlags & GTF_CALL_VIRT_KIND_MASK) != GTF_CALL_NONVIRT))
{
// Retrieve the 'this' arg
- GenTreePtr thisArg = gtGetThisArg(tree);
+ GenTreePtr thisArg = gtGetThisArg(tree->AsCall());
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) || defined(_TARGET_ARM_)
if (thisArg == nullptr)
{
* Returns the modified tree, or nullptr if no assertion prop took place.
*
*/
-GenTreePtr Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions,
- const GenTreePtr tree,
- const GenTreePtr stmt)
+GenTreePtr Compiler::optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt)
{
- assert(tree->gtOper == GT_CALL);
- if ((tree->gtFlags & GTF_CALL_NULLCHECK) == 0)
+ if ((call->gtFlags & GTF_CALL_NULLCHECK) == 0)
{
return nullptr;
}
- GenTreePtr op1 = gtGetThisArg(tree);
+ GenTreePtr op1 = gtGetThisArg(call);
noway_assert(op1 != nullptr);
if (op1->gtOper != GT_LCL_VAR)
{
{
(vnBased) ? printf("\nVN based non-null prop in BB%02u:\n", compCurBB->bbNum)
: printf("\nNon-null prop for index #%02u in BB%02u:\n", index, compCurBB->bbNum);
- gtDispTree(tree, nullptr, nullptr, true);
+ gtDispTree(call, nullptr, nullptr, true);
}
#endif
- tree->gtFlags &= ~GTF_CALL_NULLCHECK;
- tree->gtFlags &= ~GTF_EXCEPT;
- noway_assert(tree->gtFlags & GTF_SIDE_EFFECT);
- return tree;
+ call->gtFlags &= ~GTF_CALL_NULLCHECK;
+ call->gtFlags &= ~GTF_EXCEPT;
+ noway_assert(call->gtFlags & GTF_SIDE_EFFECT);
+ return call;
}
return nullptr;
}
*
*/
-GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt)
+GenTreePtr Compiler::optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt)
{
- assert(tree->gtOper == GT_CALL);
-
- if (optNonNullAssertionProp_Call(assertions, tree, stmt))
+ if (optNonNullAssertionProp_Call(assertions, call, stmt))
{
- return optAssertionProp_Update(tree, tree, stmt);
+ return optAssertionProp_Update(call, call, stmt);
}
- else if (!optLocalAssertionProp && (tree->gtCall.gtCallType == CT_HELPER))
+ else if (!optLocalAssertionProp && (call->gtCallType == CT_HELPER))
{
- if (tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFINTERFACE) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFARRAY) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFCLASS) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFANY) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTINTERFACE) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTARRAY) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTCLASS) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTANY) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTCLASS_SPECIAL))
+ if (call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFINTERFACE) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFARRAY) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFCLASS) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_ISINSTANCEOFANY) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTINTERFACE) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTARRAY) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTCLASS) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTANY) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_CHKCASTCLASS_SPECIAL))
{
- GenTreePtr arg1 = gtArgEntryByArgNum(tree->AsCall(), 1)->node;
+ GenTreePtr arg1 = gtArgEntryByArgNum(call, 1)->node;
if (arg1->gtOper != GT_LCL_VAR)
{
return nullptr;
}
- GenTreePtr arg2 = gtArgEntryByArgNum(tree->AsCall(), 0)->node;
+ GenTreePtr arg2 = gtArgEntryByArgNum(call, 0)->node;
unsigned index = optAssertionIsSubtype(arg1, arg2, assertions);
if (index != NO_ASSERTION_INDEX)
if (verbose)
{
printf("\nDid VN based subtype prop for index #%02u in BB%02u:\n", index, compCurBB->bbNum);
- gtDispTree(tree, nullptr, nullptr, true);
+ gtDispTree(call, nullptr, nullptr, true);
}
#endif
GenTreePtr list = nullptr;
- gtExtractSideEffList(tree, &list, GTF_SIDE_EFFECT, true);
+ gtExtractSideEffList(call, &list, GTF_SIDE_EFFECT, true);
if (list != nullptr)
{
- arg1 = gtNewOperNode(GT_COMMA, tree->TypeGet(), list, arg1);
+ arg1 = gtNewOperNode(GT_COMMA, call->TypeGet(), list, arg1);
fgSetTreeSeq(arg1);
}
- return optAssertionProp_Update(arg1, tree, stmt);
+ return optAssertionProp_Update(arg1, call, stmt);
}
}
}
return optAssertionProp_Cast(assertions, tree, stmt);
case GT_CALL:
- return optAssertionProp_Call(assertions, tree, stmt);
+ return optAssertionProp_Call(assertions, tree->AsCall(), stmt);
case GT_EQ:
case GT_NE:
GenTreePtr newTree = nullptr;
if (tree->OperGet() == GT_CALL)
{
- newTree = optNonNullAssertionProp_Call(empty, tree, stmt);
+ newTree = optNonNullAssertionProp_Call(empty, tree->AsCall(), stmt);
}
else if (tree->OperIsIndir())
{
instruction ins, regNumber reg, TempDsc* tmp, unsigned ofs, var_types type, emitAttr size = EA_UNKNOWN);
void inst_FS_ST(instruction ins, emitAttr size, TempDsc* tmp, unsigned ofs);
- void instEmit_indCall(GenTreePtr call,
- size_t argSize,
+ void instEmit_indCall(GenTreeCall* call,
+ size_t argSize,
emitAttr retSize MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(emitAttr secondRetSize));
void instEmit_RM(instruction ins, GenTreePtr tree, GenTreePtr addr, unsigned offs);
{
assert(treeNode->OperGet() == GT_PUTARG_STK);
var_types targetType = treeNode->TypeGet();
- GenTreePtr source = treeNode->gtOp.gtOp1;
+ GenTreePtr source = treeNode->gtOp1;
emitter* emit = getEmitter();
// This is the varNum for our store operations,
// Get argument offset to use with 'varNumOut'
// Here we cross check that argument offset hasn't changed from lowering to codegen since
// we are storing arg slot number in GT_PUTARG_STK node in lowering phase.
- unsigned argOffsetOut = treeNode->AsPutArgStk()->gtSlotNum * TARGET_POINTER_SIZE;
+ unsigned argOffsetOut = treeNode->gtSlotNum * TARGET_POINTER_SIZE;
#ifdef DEBUG
- fgArgTabEntryPtr curArgTabEntry = compiler->gtArgEntryByNode(treeNode->AsPutArgStk()->gtCall, treeNode);
+ fgArgTabEntryPtr curArgTabEntry = compiler->gtArgEntryByNode(treeNode->gtCall, treeNode);
assert(curArgTabEntry);
assert(argOffsetOut == (curArgTabEntry->slotNum * TARGET_POINTER_SIZE));
#endif // DEBUG
break;
case GT_CALL:
- genCallInstruction(treeNode);
+ genCallInstruction(treeNode->AsCall());
break;
case GT_LOCKADD:
//------------------------------------------------------------------------
// genCallInstruction: Produce code for a GT_CALL node
//
-void CodeGen::genCallInstruction(GenTreePtr node)
+void CodeGen::genCallInstruction(GenTreeCall* call)
{
- GenTreeCall* call = node->AsCall();
-
- assert(call->gtOper == GT_CALL);
-
gtCallTypes callType = (gtCallTypes)call->gtCallType;
IL_OFFSETX ilOffset = BAD_IL_OFFSET;
if (call->NeedsNullCheck())
{
const regNumber regThis = genGetThisArgReg(call);
- const regNumber tmpReg = genRegNumFromMask(node->gtRsvdRegs);
+ const regNumber tmpReg = genRegNumFromMask(call->gtRsvdRegs);
getEmitter()->emitIns_R_R_I(INS_ldr, EA_4BYTE, tmpReg, regThis, 0);
}
if (callType == CT_INDIRECT)
{
assert(target == nullptr);
- target = call->gtCall.gtCallAddr;
+ target = call->gtCallAddr;
methHnd = nullptr;
}
else
// Non-virtual direct call to known addresses
if (!arm_Valid_Imm_For_BL((ssize_t)addr))
{
- regNumber tmpReg = genRegNumFromMask(node->gtRsvdRegs);
+ regNumber tmpReg = genRegNumFromMask(call->gtRsvdRegs);
instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, tmpReg, (ssize_t)addr);
genEmitCall(emitter::EC_INDIR_R, methHnd, INDEBUG_LDISASM_COMMA(sigInfo) NULL, retSize, ilOffset, tmpReg);
}
var_types targetType = treeNode->TypeGet();
emitter *emit = getEmitter();
emitAttr size = emitTypeSize(treeNode);
- GenTree *op1 = treeNode->gtOp.gtOp1;
- GenTree *op2 = treeNode->gtOp.gtOp2;
+ GenTree *op1 = treeNode->gtOp1;
+ GenTree *op2 = treeNode->gtOp2;
// to get the high bits of the multiply, we are constrained to using the
// 1-op form: RDX:RAX = RAX * rm
// The 3-op form (Rx=Ry*Rz) does not support it.
- genConsumeOperands(treeNode->AsOp());
+ genConsumeOperands(treeNode);
GenTree* regOp = op1;
GenTree* rmOp = op2;
break;
case GT_CALL:
- genCallInstruction(treeNode);
+ genCallInstruction(treeNode->AsCall());
break;
case GT_JMP:
}
// Produce code for a GT_CALL node
-void CodeGen::genCallInstruction(GenTreePtr node)
+void CodeGen::genCallInstruction(GenTreeCall* call)
{
- GenTreeCall* call = node->AsCall();
-
- assert(call->gtOper == GT_CALL);
-
gtCallTypes callType = (gtCallTypes)call->gtCallType;
IL_OFFSETX ilOffset = BAD_IL_OFFSET;
if (callType == CT_INDIRECT)
{
assert(target == nullptr);
- target = call->gtCall.gtCallAddr;
+ target = call->gtCallAddr;
methHnd = nullptr;
}
else
{
assert(treeNode->OperGet() == GT_PUTARG_STK);
var_types targetType = treeNode->TypeGet();
- GenTreePtr source = treeNode->gtOp.gtOp1;
+ GenTreePtr source = treeNode->gtOp1;
emitter* emit = getEmitter();
// This is the varNum for our store operations,
// Get argument offset to use with 'varNumOut'
// Here we cross check that argument offset hasn't changed from lowering to codegen since
// we are storing arg slot number in GT_PUTARG_STK node in lowering phase.
- unsigned argOffsetOut = treeNode->AsPutArgStk()->gtSlotNum * TARGET_POINTER_SIZE;
+ unsigned argOffsetOut = treeNode->gtSlotNum * TARGET_POINTER_SIZE;
#ifdef DEBUG
- fgArgTabEntryPtr curArgTabEntry = compiler->gtArgEntryByNode(treeNode->AsPutArgStk()->gtCall, treeNode);
+ fgArgTabEntryPtr curArgTabEntry = compiler->gtArgEntryByNode(treeNode->gtCall, treeNode);
assert(curArgTabEntry);
assert(argOffsetOut == (curArgTabEntry->slotNum * TARGET_POINTER_SIZE));
#endif // DEBUG
#if FEATURE_FASTTAILCALL
- bool putInIncomingArgArea = treeNode->AsPutArgStk()->putInIncomingArgArea;
+ bool putInIncomingArgArea = treeNode->putInIncomingArgArea;
#else
const bool putInIncomingArgArea = false;
#endif
argOffsetMax = compiler->compArgSize;
#if FEATURE_FASTTAILCALL
// This must be a fast tail call.
- assert(treeNode->AsPutArgStk()->gtCall->AsCall()->IsFastTailCall());
+ assert(treeNode->gtCall->IsFastTailCall());
// Since it is a fast tail call, the existence of first incoming arg is guaranteed
// because fast tail call requires that in-coming arg area of caller is >= out-going
void genCodeForSwitchTable(GenTreePtr tree);
void genCodeForSwitch(GenTreePtr tree);
-size_t genPushArgList(GenTreePtr call);
+size_t genPushArgList(GenTreeCall* call);
#ifdef _TARGET_ARM_
// We are generating code for a promoted struct local variable. Fill the next slot (register or
// of cpBlk).
regMaskTP genFindDeadFieldRegs(GenTreePtr cpBlk);
-void SetupLateArgs(GenTreePtr call);
+void SetupLateArgs(GenTreeCall* call);
#ifdef _TARGET_ARM_
void PushMkRefAnyArg(GenTreePtr mkRefAnyTree, fgArgTabEntryPtr curArgTabEntry, regMaskTP regNeedMask);
#endif // _TARGET_ARM_
-regMaskTP genLoadIndirectCallTarget(GenTreePtr call);
+regMaskTP genLoadIndirectCallTarget(GenTreeCall* call);
-regMaskTP genCodeForCall(GenTreePtr call, bool valUsed);
+regMaskTP genCodeForCall(GenTreeCall* call, bool valUsed);
GenTreePtr genGetAddrModeBase(GenTreePtr tree);
#endif // LEGACY_BACKEND
// inline
-regNumber CodeGenInterface::genGetThisArgReg(GenTreePtr call)
+regNumber CodeGenInterface::genGetThisArgReg(GenTreeCall* call) const
{
- noway_assert(call->IsCall());
return REG_ARG_0;
}
int genSPtoFPdelta();
int genTotalFrameSize();
- regNumber genGetThisArgReg(GenTreePtr call);
+ regNumber genGetThisArgReg(GenTreeCall* call) const;
#ifdef _TARGET_XARCH_
#ifdef _TARGET_AMD64_
// We have a return call() because we failed to tail call.
// In any case, just generate the call and be done.
assert(compiler->IsHfa(op1));
- genCodeForCall(op1, true);
+ genCodeForCall(op1->AsCall(), true);
genMarkTreeInReg(op1, REG_FLOATRET);
}
else
assert(op2->gtOper == GT_CALL);
// Generate code for call and copy the return registers into the local.
- regMaskTP retMask = genCodeForCall(op2, true);
+ regMaskTP retMask = genCodeForCall(op2->AsCall(), true);
// Ret mask should be contiguously set from s0, up to s3 or starting from d0 upto d3.
CLANG_FORMAT_COMMENT_ANCHOR;
switch (oper)
{
case GT_CALL:
- regs = genCodeForCall(tree, true);
+ regs = genCodeForCall(tree->AsCall(), true);
/* If the result is in a register, make sure it ends up in the right place */
// Managed Retval under managed debugger - we need to make sure that the returned ref-type is
// reported as alive even though not used within the caller for managed debugger sake. So
// consider the return value of the method as used if generating debuggable code.
- genCodeForCall(tree, compiler->opts.MinOpts() || compiler->opts.compDbgCode);
+ genCodeForCall(tree->AsCall(), compiler->opts.MinOpts() || compiler->opts.compDbgCode);
genUpdateLife(tree);
gcInfo.gcMarkRegSetNpt(RBM_INTRET);
break;
{
regMaskTP retMask;
case GT_CALL:
- retMask = genCodeForCall(tree, true);
+ retMask = genCodeForCall(tree->AsCall(), true);
if (retMask == RBM_NONE)
regPair = REG_PAIR_NONE;
else
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-size_t CodeGen::genPushArgList(GenTreePtr call)
+size_t CodeGen::genPushArgList(GenTreeCall* call)
{
- GenTreeArgList* regArgs = call->gtCall.gtCallLateArgs;
+ GenTreeArgList* regArgs = call->gtCallLateArgs;
size_t size = 0;
regMaskTP addrReg;
// Create a local, artificial GenTreeArgList that includes the gtCallObjp, if that exists, as first argument,
// so we can iterate over this argument list more uniformly.
// Need to provide a temporary non-null first argument here: if we use this, we'll replace it.
- GenTreeArgList firstForObjp(/*temp dummy arg*/ call, call->gtCall.gtCallArgs);
- if (call->gtCall.gtCallObjp == NULL)
+ GenTreeArgList firstForObjp(/*temp dummy arg*/ call, call->gtCallArgs);
+ if (call->gtCallObjp == NULL)
{
- args = call->gtCall.gtCallArgs;
+ args = call->gtCallArgs;
}
else
{
- firstForObjp.Current() = call->gtCall.gtCallObjp;
+ firstForObjp.Current() = call->gtCallObjp;
args = &firstForObjp;
}
// ARM and AMD64 uses this method to pass the stack based args
//
// returns size pushed (always zero)
-size_t CodeGen::genPushArgList(GenTreePtr call)
+size_t CodeGen::genPushArgList(GenTreeCall* call)
{
-
- GenTreeArgList* lateArgs = call->gtCall.gtCallLateArgs;
+ GenTreeArgList* lateArgs = call->gtCallLateArgs;
GenTreePtr curr;
var_types type;
int argSize;
// Create a local, artificial GenTreeArgList that includes the gtCallObjp, if that exists, as first argument,
// so we can iterate over this argument list more uniformly.
// Need to provide a temporary non-null first argument here: if we use this, we'll replace it.
- GenTreeArgList objpArgList(/*temp dummy arg*/ call, call->gtCall.gtCallArgs);
- if (call->gtCall.gtCallObjp == NULL)
+ GenTreeArgList objpArgList(/*temp dummy arg*/ call, call->gtCallArgs);
+ if (call->gtCallObjp == NULL)
{
- args = call->gtCall.gtCallArgs;
+ args = call->gtCallArgs;
}
else
{
- objpArgList.Current() = call->gtCall.gtCallObjp;
+ objpArgList.Current() = call->gtCallObjp;
args = &objpArgList;
}
return res;
}
-void CodeGen::SetupLateArgs(GenTreePtr call)
+void CodeGen::SetupLateArgs(GenTreeCall* call)
{
GenTreeArgList* lateArgs;
GenTreePtr curr;
/* Generate the code to move the late arguments into registers */
- for (lateArgs = call->gtCall.gtCallLateArgs; lateArgs; lateArgs = lateArgs->Rest())
+ for (lateArgs = call->gtCallLateArgs; lateArgs; lateArgs = lateArgs->Rest())
{
curr = lateArgs->Current();
assert(curr);
/* If any of the previously loaded arguments were spilled - reload them */
- for (lateArgs = call->gtCall.gtCallLateArgs; lateArgs; lateArgs = lateArgs->Rest())
+ for (lateArgs = call->gtCallLateArgs; lateArgs; lateArgs = lateArgs->Rest())
{
curr = lateArgs->Current();
assert(curr);
#endif // FEATURE_FIXED_OUT_ARGS
-regMaskTP CodeGen::genLoadIndirectCallTarget(GenTreePtr call)
+regMaskTP CodeGen::genLoadIndirectCallTarget(GenTreeCall* call)
{
- assert((gtCallTypes)call->gtCall.gtCallType == CT_INDIRECT);
+ assert((gtCallTypes)call->gtCallType == CT_INDIRECT);
regMaskTP fptrRegs;
}
/* Record the register(s) used for the indirect call func ptr */
- fptrRegs = genMakeRvalueAddressable(call->gtCall.gtCallAddr, prefRegs, RegSet::KEEP_REG, false);
+ fptrRegs = genMakeRvalueAddressable(call->gtCallAddr, prefRegs, RegSet::KEEP_REG, false);
/* If any of the previously loaded arguments were spilled, reload them */
/* Make sure the target is still addressable while avoiding the argument registers */
- fptrRegs = genKeepAddressable(call->gtCall.gtCallAddr, fptrRegs, argRegs);
+ fptrRegs = genKeepAddressable(call->gtCallAddr, fptrRegs, argRegs);
return fptrRegs;
}
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-regMaskTP CodeGen::genCodeForCall(GenTreePtr call, bool valUsed)
+regMaskTP CodeGen::genCodeForCall(GenTreeCall* call, bool valUsed)
{
emitAttr retSize;
size_t argSize;
}
#endif
- gtCallTypes callType = (gtCallTypes)call->gtCall.gtCallType;
+ gtCallTypes callType = (gtCallTypes)call->gtCallType;
IL_OFFSETX ilOffset = BAD_IL_OFFSET;
CORINFO_SIG_INFO* sigInfo = nullptr;
/* Make some sanity checks on the call node */
- // This is a call
- noway_assert(call->IsCall());
// "this" only makes sense for user functions
- noway_assert(call->gtCall.gtCallObjp == 0 || callType == CT_USER_FUNC || callType == CT_INDIRECT);
+ noway_assert(call->gtCallObjp == 0 || callType == CT_USER_FUNC || callType == CT_INDIRECT);
// tailcalls won't be done for helpers, caller-pop args, and check that
// the global flag is set
- noway_assert(!call->gtCall.IsTailCall() ||
+ noway_assert(!call->IsTailCall() ||
(callType != CT_HELPER && !(call->gtFlags & GTF_CALL_POP_ARGS) && compiler->compTailCallUsed));
#ifdef DEBUG
// native call sites with the signatures they were generated from.
if (callType != CT_HELPER)
{
- sigInfo = call->gtCall.callSig;
+ sigInfo = call->callSig;
}
#endif // DEBUG
/* Pass the arguments */
- if ((call->gtCall.gtCallObjp != NULL) || (call->gtCall.gtCallArgs != NULL))
+ if ((call->gtCallObjp != NULL) || (call->gtCallArgs != NULL))
{
argSize += genPushArgList(call);
}
/* Do not spill the argument registers.
Multi-use of RBM_ARG_REGS should be prevented by genPushArgList() */
- noway_assert((regSet.rsMaskMult & call->gtCall.gtCallRegUsedMask) == 0);
- spillRegs &= ~call->gtCall.gtCallRegUsedMask;
+ noway_assert((regSet.rsMaskMult & call->gtCallRegUsedMask) == 0);
+ spillRegs &= ~call->gtCallRegUsedMask;
if (spillRegs)
{
compCurFPState.Push(regReturn);
}
#else
- SpillForCallRegisterFP(call->gtCall.gtCallRegUsedMask);
+ SpillForCallRegisterFP(call->gtCallRegUsedMask);
#endif
/* If the method returns a GC ref, set size to EA_GCREF or EA_BYREF */
/* fire the event at the call site */
/* alas, right now I can only handle calls via a method handle */
- if (compiler->compIsProfilerHookNeeded() && (callType == CT_USER_FUNC) && call->gtCall.IsTailCall())
+ if (compiler->compIsProfilerHookNeeded() && (callType == CT_USER_FUNC) && call->IsTailCall())
{
unsigned saveStackLvl2 = genStackLevel;
#ifdef _TARGET_X86_
regMaskTP byrefPushedRegs;
regMaskTP norefPushedRegs;
- regMaskTP pushedArgRegs = genPushRegs(call->gtCall.gtCallRegUsedMask, &byrefPushedRegs, &norefPushedRegs);
+ regMaskTP pushedArgRegs = genPushRegs(call->gtCallRegUsedMask, &byrefPushedRegs, &norefPushedRegs);
if (compiler->compProfilerMethHndIndirected)
{
// check the stacks as frequently as possible
&& !call->IsHelperCall()
#else
- && call->gtCall.gtCallType == CT_USER_FUNC
+ && call->gtCallType == CT_USER_FUNC
#endif
)
{
bool fTailCallTargetIsVSD = false;
- bool fTailCall = (call->gtCall.gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0;
+ bool fTailCall = (call->gtCallMoreFlags & GTF_CALL_M_TAILCALL) != 0;
/* Check for Delegate.Invoke. If so, we inline it. We get the
target-object and target-function from the delegate-object, and do
an indirect call.
*/
- if ((call->gtCall.gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) && !fTailCall)
+ if ((call->gtCallMoreFlags & GTF_CALL_M_DELEGATE_INV) && !fTailCall)
{
- noway_assert(call->gtCall.gtCallType == CT_USER_FUNC);
+ noway_assert(call->gtCallType == CT_USER_FUNC);
- assert((compiler->info.compCompHnd->getMethodAttribs(call->gtCall.gtCallMethHnd) &
+ assert((compiler->info.compCompHnd->getMethodAttribs(call->gtCallMethHnd) &
(CORINFO_FLG_DELEGATE_INVOKE | CORINFO_FLG_FINAL)) ==
(CORINFO_FLG_DELEGATE_INVOKE | CORINFO_FLG_FINAL));
firstTgtOffs = pInfo->offsetOfDelegateFirstTarget;
#ifdef _TARGET_ARM_
- if ((call->gtCall.gtCallMoreFlags & GTF_CALL_M_SECURE_DELEGATE_INV))
+ if ((call->gtCallMoreFlags & GTF_CALL_M_SECURE_DELEGATE_INV))
{
getEmitter()->emitIns_R_R_I(INS_add, EA_PTRSIZE, REG_VIRTUAL_STUB_PARAM, regThis,
pInfo->offsetOfSecureDelegateIndirectCell);
// No need to null check the this pointer - the dispatch code will deal with this.
- noway_assert(genStillAddressable(call->gtCall.gtCallAddr));
+ noway_assert(genStillAddressable(call->gtCallAddr));
// Now put the address in REG_VIRTUAL_STUB_PARAM.
// This is typically a nop when the register used for
// the gtCallAddr is REG_VIRTUAL_STUB_PARAM
//
- inst_RV_TT(INS_mov, REG_VIRTUAL_STUB_PARAM, call->gtCall.gtCallAddr);
+ inst_RV_TT(INS_mov, REG_VIRTUAL_STUB_PARAM, call->gtCallAddr);
regTracker.rsTrackRegTrash(REG_VIRTUAL_STUB_PARAM);
#if defined(_TARGET_X86_)
emitCallType = emitter::EC_INDIR_ARD;
indReg = REG_VIRTUAL_STUB_PARAM;
- genDoneAddressable(call->gtCall.gtCallAddr, fptrRegs, RegSet::KEEP_REG);
+ genDoneAddressable(call->gtCallAddr, fptrRegs, RegSet::KEEP_REG);
#elif CPU_LOAD_STORE_ARCH // ARM doesn't allow us to use an indirection for the call
- genDoneAddressable(call->gtCall.gtCallAddr, fptrRegs, RegSet::KEEP_REG);
+ genDoneAddressable(call->gtCallAddr, fptrRegs, RegSet::KEEP_REG);
// Make the virtual stub call:
// ldr indReg, [REG_VIRTUAL_STUB_PARAM]
// Now dereference [REG_VIRTUAL_STUB_PARAM] and put it in a new temp register 'indReg'
//
indReg = regSet.rsGrabReg(RBM_ALLINT & ~RBM_VIRTUAL_STUB_PARAM);
- assert(call->gtCall.gtCallAddr->gtFlags & GTF_REG_VAL);
+ assert(call->gtCallAddr->gtFlags & GTF_REG_VAL);
getEmitter()->emitIns_R_R_I(INS_ldr, EA_PTRSIZE, indReg, REG_VIRTUAL_STUB_PARAM, 0);
regTracker.rsTrackRegTrash(indReg);
// Get stub addr. This will return NULL if virtual call stubs are not active
void* stubAddr = NULL;
- stubAddr = (void*)call->gtCall.gtStubCallStubAddr;
+ stubAddr = (void*)call->gtStubCallStubAddr;
noway_assert(stubAddr != NULL);
int disp = 0;
regNumber callReg = REG_NA;
- if (call->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
+ if (call->gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT)
{
#if CPU_LOAD_STORE_ARCH
callReg = regSet.rsGrabReg(RBM_VIRTUAL_STUB_PARAM);
if (callTypeStubAddr != emitter::EC_INDIR_R)
#endif
{
- getEmitter()->emitIns_Call(callTypeStubAddr, call->gtCall.gtCallMethHnd,
+ getEmitter()->emitIns_Call(callTypeStubAddr, call->gtCallMethHnd,
INDEBUG_LDISASM_COMMA(sigInfo) addr, args, retSize,
gcInfo.gcVarPtrSetCur, gcInfo.gcRegGCrefSetCur,
gcInfo.gcRegByrefSetCur, ilOffset, callReg, REG_NA, 0, disp);
if (callType == CT_INDIRECT)
{
- noway_assert(genStillAddressable(call->gtCall.gtCallAddr));
+ noway_assert(genStillAddressable(call->gtCallAddr));
// Now put the address in EAX.
- inst_RV_TT(INS_mov, REG_TAILCALL_ADDR, call->gtCall.gtCallAddr);
+ inst_RV_TT(INS_mov, REG_TAILCALL_ADDR, call->gtCallAddr);
regTracker.rsTrackRegTrash(REG_TAILCALL_ADDR);
- genDoneAddressable(call->gtCall.gtCallAddr, fptrRegs, RegSet::KEEP_REG);
+ genDoneAddressable(call->gtCallAddr, fptrRegs, RegSet::KEEP_REG);
}
else
{
// importer/EE should guarantee the indirection
- noway_assert(call->gtCall.gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT);
+ noway_assert(call->gtCallMoreFlags & GTF_CALL_M_VIRTSTUB_REL_INDIRECT);
instGen_Set_Reg_To_Imm(EA_HANDLE_CNS_RELOC, REG_TAILCALL_ADDR,
- ssize_t(call->gtCall.gtStubCallStubAddr));
+ ssize_t(call->gtStubCallStubAddr));
}
fTailCallTargetIsVSD = true;
VPTR_OFFS);
regTracker.rsTrackRegTrash(vptrReg);
- noway_assert(vptrMask & ~call->gtCall.gtCallRegUsedMask);
+ noway_assert(vptrMask & ~call->gtCallRegUsedMask);
/* Get hold of the vtable offset (note: this might be expensive) */
- compiler->info.compCompHnd->getMethodVTableOffset(call->gtCall.gtCallMethHnd,
- &vtabOffsOfIndirection,
+ compiler->info.compCompHnd->getMethodVTableOffset(call->gtCallMethHnd, &vtabOffsOfIndirection,
&vtabOffsAfterIndirection);
/* Get the appropriate vtable chunk */
getEmitter()->emitIns_R_AR(ins_Load(TYP_I_IMPL), EA_PTRSIZE, vptrReg, vptrReg,
vtabOffsAfterIndirection);
- getEmitter()->emitIns_Call(emitter::EC_INDIR_R, call->gtCall.gtCallMethHnd,
+ getEmitter()->emitIns_Call(emitter::EC_INDIR_R, call->gtCallMethHnd,
INDEBUG_LDISASM_COMMA(sigInfo) NULL, // addr
args, retSize, gcInfo.gcVarPtrSetCur, gcInfo.gcRegGCrefSetCur,
gcInfo.gcRegByrefSetCur, ilOffset,
vptrReg); // ireg
#else
- getEmitter()->emitIns_Call(emitter::EC_FUNC_VIRTUAL, call->gtCall.gtCallMethHnd,
+ getEmitter()->emitIns_Call(emitter::EC_FUNC_VIRTUAL, call->gtCallMethHnd,
INDEBUG_LDISASM_COMMA(sigInfo) NULL, // addr
args, retSize, gcInfo.gcVarPtrSetCur, gcInfo.gcRegGCrefSetCur,
gcInfo.gcRegByrefSetCur, ilOffset,
// - Indirect calls to computed addresses
// - Tailcall versions of all of the above
- CORINFO_METHOD_HANDLE methHnd = call->gtCall.gtCallMethHnd;
+ CORINFO_METHOD_HANDLE methHnd = call->gtCallMethHnd;
//------------------------------------------------------
// Non-virtual/Indirect calls: Insert a null check on the "this" pointer if needed
if (callType == CT_INDIRECT)
{
- noway_assert(genStillAddressable(call->gtCall.gtCallAddr));
+ noway_assert(genStillAddressable(call->gtCallAddr));
- if (call->gtCall.gtCallAddr->gtFlags & GTF_REG_VAL)
- indCallReg = call->gtCall.gtCallAddr->gtRegNum;
+ if (call->gtCallAddr->gtFlags & GTF_REG_VAL)
+ indCallReg = call->gtCallAddr->gtRegNum;
nArgSize = (call->gtFlags & GTF_CALL_POP_ARGS) ? 0 : (int)argSize;
methHnd = 0;
anyways.
*/
- inst_RV_TT(INS_mov, indCallReg, call->gtCall.gtCallAddr);
+ inst_RV_TT(INS_mov, indCallReg, call->gtCallAddr);
regTracker.rsTrackRegTrash(indCallReg);
}
ilOffset, indCallReg);
if (callType == CT_INDIRECT)
- genDoneAddressable(call->gtCall.gtCallAddr, fptrRegs, RegSet::KEEP_REG);
+ genDoneAddressable(call->gtCallAddr, fptrRegs, RegSet::KEEP_REG);
getEmitter()->emitEnableRandomNops();
if (callType == CT_INDIRECT)
{
- noway_assert(genStillAddressable(call->gtCall.gtCallAddr));
+ noway_assert(genStillAddressable(call->gtCallAddr));
- if (call->gtCall.gtCallCookie)
+ if (call->gtCallCookie)
{
//------------------------------------------------------
// Non-virtual indirect calls via the P/Invoke stub
- GenTreePtr cookie = call->gtCall.gtCallCookie;
- GenTreePtr target = call->gtCall.gtCallAddr;
+ GenTreePtr cookie = call->gtCallCookie;
+ GenTreePtr target = call->gtCallAddr;
noway_assert((call->gtFlags & GTF_CALL_POP_ARGS) == 0);
// Ensure that we don't trash any of these registers if we have to load
// the helper call target into a register to invoke it.
regMaskTP regsUsed;
- regSet.rsLockReg(call->gtCall.gtCallRegUsedMask | RBM_PINVOKE_TARGET_PARAM |
- RBM_PINVOKE_COOKIE_PARAM,
+ regSet.rsLockReg(call->gtCallRegUsedMask | RBM_PINVOKE_TARGET_PARAM | RBM_PINVOKE_COOKIE_PARAM,
®sUsed);
#else
NYI("Non-virtual indirect calls via the P/Invoke stub");
genEmitHelperCall(CORINFO_HELP_PINVOKE_CALLI, (int)args, retSize);
#if defined(_TARGET_ARM_)
- regSet.rsUnlockReg(call->gtCall.gtCallRegUsedMask | RBM_PINVOKE_TARGET_PARAM |
+ regSet.rsUnlockReg(call->gtCallRegUsedMask | RBM_PINVOKE_TARGET_PARAM |
RBM_PINVOKE_COOKIE_PARAM,
regsUsed);
#endif
if (fTailCall)
{
- inst_RV_TT(INS_mov, REG_TAILCALL_ADDR, call->gtCall.gtCallAddr);
+ inst_RV_TT(INS_mov, REG_TAILCALL_ADDR, call->gtCallAddr);
regTracker.rsTrackRegTrash(REG_TAILCALL_ADDR);
}
else
instEmit_indCall(call, args, retSize);
}
- genDoneAddressable(call->gtCall.gtCallAddr, fptrRegs, RegSet::KEEP_REG);
+ genDoneAddressable(call->gtCallAddr, fptrRegs, RegSet::KEEP_REG);
// Done with indirect calls
break;
CORINFO_ACCESS_FLAGS aflags = CORINFO_ACCESS_ANY;
- if (call->gtCall.gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS)
+ if (call->gtCallMoreFlags & GTF_CALL_M_NONVIRT_SAME_THIS)
aflags = (CORINFO_ACCESS_FLAGS)(aflags | CORINFO_ACCESS_THIS);
if ((call->gtFlags & GTF_CALL_NULLCHECK) == 0)
getEmitter()->emitCurIGsize + // size of the current IG
4; // size of the jump instruction
// that we are now emitting
- if (compiler->gtIsRecursiveCall(call->AsCall()) && codeOffset <= -CALL_DIST_MAX_NEG)
+ if (compiler->gtIsRecursiveCall(call) && codeOffset <= -CALL_DIST_MAX_NEG)
{
getEmitter()->emitIns_Call(emitter::EC_FUNC_TOKEN, methHnd,
INDEBUG_LDISASM_COMMA(sigInfo) NULL, // addr
regMaskTP curArgMask = genMapArgNumToRegMask(areg, TYP_INT);
// Is this one of the used argument registers?
- if ((curArgMask & call->gtCall.gtCallRegUsedMask) == 0)
+ if ((curArgMask & call->gtCallRegUsedMask) == 0)
continue;
#ifdef _TARGET_ARM_
regMaskTP curArgMask = genMapArgNumToRegMask(areg, TYP_FLOAT);
// Is this one of the used argument registers?
- if ((curArgMask & call->gtCall.gtCallRegUsedMask) == 0)
+ if ((curArgMask & call->gtCallRegUsedMask) == 0)
continue;
regSet.rsMaskUsed &= ~curArgMask;
if (call->gtType == TYP_FLOAT || call->gtType == TYP_DOUBLE)
{
#ifdef _TARGET_ARM_
- if (call->gtCall.IsVarargs() || compiler->opts.compUseSoftFP)
+ if (call->IsVarargs() || compiler->opts.compUseSoftFP)
{
// Result return for vararg methods is in r0, r1, but our callers would
// expect the return in s0, s1 because of floating type. Do the move now.
#ifdef _TARGET_ARM_
case TYP_STRUCT:
{
- assert(call->gtCall.gtRetClsHnd != NULL);
- assert(compiler->IsHfa(call->gtCall.gtRetClsHnd));
- int retSlots = compiler->GetHfaCount(call->gtCall.gtRetClsHnd);
+ assert(call->gtRetClsHnd != NULL);
+ assert(compiler->IsHfa(call->gtRetClsHnd));
+ int retSlots = compiler->GetHfaCount(call->gtRetClsHnd);
assert(retSlots > 0 && retSlots <= MAX_HFA_RET_SLOTS);
assert(MAX_HFA_RET_SLOTS < sizeof(int) * 8);
retVal = ((1 << retSlots) - 1) << REG_FLOATRET;
if (frameListRoot)
genPInvokeCallEpilog(frameListRoot, retVal);
- if (frameListRoot && (call->gtCall.gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
+ if (frameListRoot && (call->gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
{
if (frameListRoot->lvRegister)
{
// check the stack as frequently as possible
&& !call->IsHelperCall()
#else
- && call->gtCall.gtCallType == CT_USER_FUNC
+ && call->gtCallType == CT_USER_FUNC
#endif
)
{
bool genEmitOptimizedGCWriteBarrier(GCInfo::WriteBarrierForm writeBarrierForm, GenTree* addr, GenTree* data);
-void genCallInstruction(GenTreePtr call);
+void genCallInstruction(GenTreeCall* call);
void genJmpMethod(GenTreePtr jmp);
break;
case GT_CALL:
- genCallInstruction(treeNode);
+ genCallInstruction(treeNode->AsCall());
break;
case GT_JMP:
}
// Produce code for a GT_CALL node
-void CodeGen::genCallInstruction(GenTreePtr node)
+void CodeGen::genCallInstruction(GenTreeCall* call)
{
- GenTreeCall* call = node->AsCall();
- assert(call->gtOper == GT_CALL);
-
gtCallTypes callType = (gtCallTypes)call->gtCallType;
IL_OFFSETX ilOffset = BAD_IL_OFFSET;
if (callType == CT_INDIRECT)
{
assert(target == nullptr);
- target = call->gtCall.gtCallAddr;
+ target = call->gtCallAddr;
methHnd = nullptr;
}
else
class fgArgInfo
{
- Compiler* compiler; // Back pointer to the compiler instance so that we can allocate memory
- GenTreePtr callTree; // Back pointer to the GT_CALL node for this fgArgInfo
- unsigned argCount; // Updatable arg count value
- unsigned nextSlotNum; // Updatable slot count value
- unsigned stkLevel; // Stack depth when we make this call (for x86)
+ Compiler* compiler; // Back pointer to the compiler instance so that we can allocate memory
+ GenTreeCall* callTree; // Back pointer to the GT_CALL node for this fgArgInfo
+ unsigned argCount; // Updatable arg count value
+ unsigned nextSlotNum; // Updatable slot count value
+ unsigned stkLevel; // Stack depth when we make this call (for x86)
#if defined(UNIX_X86_ABI)
unsigned padStkAlign; // Count of number of padding slots for stack alignment. This value is used to turn back
// stack pointer before it was adjusted after each Call
void AddArg(fgArgTabEntryPtr curArgTabEntry);
public:
- fgArgInfo(Compiler* comp, GenTreePtr call, unsigned argCount);
- fgArgInfo(GenTreePtr newCall, GenTreePtr oldCall);
+ fgArgInfo(Compiler* comp, GenTreeCall* call, unsigned argCount);
+ fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall);
fgArgTabEntryPtr AddRegArg(
unsigned argNum, GenTreePtr node, GenTreePtr parent, regNumber regNum, unsigned numRegs, unsigned alignment);
GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2);
GenTreeArgList* gtNewArgList(GenTreePtr op1, GenTreePtr op2, GenTreePtr op3);
- static fgArgTabEntryPtr gtArgEntryByArgNum(GenTreePtr call, unsigned argNum);
- static fgArgTabEntryPtr gtArgEntryByNode(GenTreePtr call, GenTreePtr node);
- fgArgTabEntryPtr gtArgEntryByLateArgIndex(GenTreePtr call, unsigned lateArgInx);
+ static fgArgTabEntryPtr gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum);
+ static fgArgTabEntryPtr gtArgEntryByNode(GenTreeCall* call, GenTreePtr node);
+ fgArgTabEntryPtr gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx);
bool gtArgIsThisPtr(fgArgTabEntryPtr argEntry);
GenTreePtr gtNewAssignNode(GenTreePtr dst, GenTreePtr src);
unsigned flags = GTF_SIDE_EFFECT,
bool ignoreRoot = false);
- GenTreePtr gtGetThisArg(GenTreePtr call);
+ GenTreePtr gtGetThisArg(GenTreeCall* call);
// Static fields of struct types (and sometimes the types that those are reduced to) are represented by having the
// static field contain an object pointer to the boxed struct. This simplifies the GC implementation...but
char* gtGetLclVarName(unsigned lclNum);
void gtDispLclVar(unsigned varNum, bool padForBiggestDisp = true);
void gtDispTreeList(GenTreePtr tree, IndentStack* indentStack = nullptr);
- void gtGetArgMsg(GenTreePtr call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength);
- void gtGetLateArgMsg(GenTreePtr call, GenTreePtr arg, int argNum, int listCount, char* bufp, unsigned bufLength);
- void gtDispArgList(GenTreePtr tree, IndentStack* indentStack);
+ void gtGetArgMsg(GenTreeCall* call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength);
+ void gtGetLateArgMsg(GenTreeCall* call, GenTreePtr arg, int argNum, int listCount, char* bufp, unsigned bufLength);
+ void gtDispArgList(GenTreeCall* call, IndentStack* indentStack);
void gtDispFieldSeq(FieldSeqNode* pfsn);
void gtDispRange(LIR::ReadOnlyRange const& range);
bool impCanPInvokeInline();
bool impCanPInvokeInlineCallSite(BasicBlock* block);
void impCheckForPInvokeCall(
- GenTreePtr call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block);
- GenTreePtr impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX ilOffset = BAD_IL_OFFSET);
+ GenTreeCall* call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block);
+ GenTreeCall* impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX ilOffset = BAD_IL_OFFSET);
void impPopArgsForUnmanagedCall(GenTreePtr call, CORINFO_SIG_INFO* sig);
void impInsertHelperCall(CORINFO_HELPER_DESC* helperCall);
bool impMethodInfo_hasRetBuffArg(CORINFO_METHOD_INFO* methInfo);
- GenTreePtr impFixupCallStructReturn(GenTreePtr call, CORINFO_CLASS_HANDLE retClsHnd);
+ GenTreePtr impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd);
GenTreePtr impFixupStructReturnType(GenTreePtr op, CORINFO_CLASS_HANDLE retClsHnd);
GenTreePtr impReadyToRunLookupToTree(CORINFO_CONST_LOOKUP* pLookup, unsigned flags, void* compileTimeHandle);
- GenTreePtr impReadyToRunHelperToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
- CorInfoHelpFunc helper,
- var_types type,
- GenTreeArgList* arg = nullptr,
- CORINFO_LOOKUP_KIND* pGenericLookupKind = nullptr);
+ GenTreeCall* impReadyToRunHelperToTree(CORINFO_RESOLVED_TOKEN* pResolvedToken,
+ CorInfoHelpFunc helper,
+ var_types type,
+ GenTreeArgList* arg = nullptr,
+ CORINFO_LOOKUP_KIND* pGenericLookupKind = nullptr);
GenTreePtr impCastClassOrIsInstToTree(GenTreePtr op1,
GenTreePtr op2,
GenTreePtr fgInitThisClass();
- GenTreePtr fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper);
+ GenTreeCall* fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper);
- GenTreePtr fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls);
+ GenTreeCall* fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls);
void fgLocalVarLiveness();
void fgNoteNonInlineCandidate(GenTreeStmt* stmt, GenTreeCall* call);
static fgWalkPreFn fgFindNonInlineCandidate;
#endif
- GenTreePtr fgOptimizeDelegateConstructor(GenTreePtr call, CORINFO_CONTEXT_HANDLE* ExactContextHnd);
+ GenTreePtr fgOptimizeDelegateConstructor(GenTreeCall* call, CORINFO_CONTEXT_HANDLE* ExactContextHnd);
GenTreePtr fgMorphLeaf(GenTreePtr tree);
void fgAssignSetVarDef(GenTreePtr tree);
GenTreePtr fgMorphOneAsgBlockOp(GenTreePtr tree);
static fgWalkPreFn gtHasLocalsWithAddrOpCB;
bool gtCanOptimizeTypeEquality(GenTreePtr tree);
- bool gtIsTypeHandleToRuntimeTypeHelper(GenTreePtr tree);
+ bool gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call);
bool gtIsActiveCSE_Candidate(GenTreePtr tree);
#ifdef DEBUG
callInterf ivaMaskCall; // What kind of calls are there?
};
- static callInterf optCallInterf(GenTreePtr call);
+ static callInterf optCallInterf(GenTreeCall* call);
public:
// VN based copy propagation.
GenTreePtr optAssertionProp_LclVar(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionProp_Ind(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionProp_Cast(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optAssertionProp_Call(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
+ GenTreePtr optAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt);
GenTreePtr optAssertionProp_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionProp_Comma(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionProp_BndsChk(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionPropGlobal_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionPropLocal_RelOp(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
GenTreePtr optAssertionProp_Update(const GenTreePtr newTree, const GenTreePtr tree, const GenTreePtr stmt);
- GenTreePtr optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, const GenTreePtr tree, const GenTreePtr stmt);
+ GenTreePtr optNonNullAssertionProp_Call(ASSERT_VALARG_TP assertions, GenTreeCall* call, const GenTreePtr stmt);
// Implied assertion functions.
void optImpliedAssertions(AssertionIndex assertionIndex, ASSERT_TP& activeAssertions);
if (GCPOLL_CALL == pollType)
{
createdPollBlocks = false;
- GenTreePtr tree = gtNewHelperCallNode(CORINFO_HELP_POLL_GC, TYP_VOID);
+ GenTreeCall* call = gtNewHelperCallNode(CORINFO_HELP_POLL_GC, TYP_VOID);
#if GTF_CALL_REG_SAVE
- tree->gtCall.gtCallMoreFlags |= GTF_CALL_REG_SAVE;
+ call->gtCallMoreFlags |= GTF_CALL_REG_SAVE;
#endif // GTF_CALL_REG_SAVE
// for BBJ_ALWAYS I don't need to insert it before the condition. Just append it.
if (block->bbJumpKind == BBJ_ALWAYS)
{
- fgInsertStmtAtEnd(block, tree);
+ fgInsertStmtAtEnd(block, call);
}
else
{
- GenTreeStmt* newStmt = fgInsertStmtNearEnd(block, tree);
+ GenTreeStmt* newStmt = fgInsertStmtNearEnd(block, call);
// For DDB156656, we need to associate the GC Poll with the IL offset (and therefore sequence
// point) of the tree before which we inserted the poll. One example of when this is a
// problem:
bottom->bbJumpDest = top->bbJumpDest;
// 2) Add a GC_CALL node to Poll.
- GenTreePtr tree = gtNewHelperCallNode(CORINFO_HELP_POLL_GC, TYP_VOID);
+ GenTreeCall* call = gtNewHelperCallNode(CORINFO_HELP_POLL_GC, TYP_VOID);
#if GTF_CALL_REG_SAVE
- tree->gtCall.gtCallMoreFlags |= GTF_CALL_REG_SAVE;
+ call->gtCallMoreFlags |= GTF_CALL_REG_SAVE;
#endif // GTF_CALL_REG_SAVE
- fgInsertStmtAtEnd(poll, tree);
+ fgInsertStmtAtEnd(poll, call);
// 3) Remove the last statement from Top and add it to Bottom.
if (oldJumpKind != BBJ_ALWAYS)
return res;
}
-GenTreePtr Compiler::fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper)
+GenTreeCall* Compiler::fgGetStaticsCCtorHelper(CORINFO_CLASS_HANDLE cls, CorInfoHelpFunc helper)
{
bool bNeedClassID = true;
unsigned callFlags = 0;
return gtNewHelperCallNode(helper, type, callFlags, argList);
}
-GenTreePtr Compiler::fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls)
+GenTreeCall* Compiler::fgGetSharedCCtor(CORINFO_CLASS_HANDLE cls)
{
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
* Optimize the call to the delegate constructor.
*/
-GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreePtr call, CORINFO_CONTEXT_HANDLE* ExactContextHnd)
+GenTreePtr Compiler::fgOptimizeDelegateConstructor(GenTreeCall* call, CORINFO_CONTEXT_HANDLE* ExactContextHnd)
{
- noway_assert(call->gtOper == GT_CALL);
-
- noway_assert(call->gtCall.gtCallType == CT_USER_FUNC);
- CORINFO_METHOD_HANDLE methHnd = call->gtCall.gtCallMethHnd;
+ noway_assert(call->gtCallType == CT_USER_FUNC);
+ CORINFO_METHOD_HANDLE methHnd = call->gtCallMethHnd;
CORINFO_CLASS_HANDLE clsHnd = info.compCompHnd->getMethodClass(methHnd);
- GenTreePtr targetMethod = call->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
+ GenTreePtr targetMethod = call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
noway_assert(targetMethod->TypeGet() == TYP_I_IMPL);
genTreeOps oper = targetMethod->OperGet();
if (oper == GT_FTN_ADDR || oper == GT_CALL || oper == GT_QMARK)
// handle.
noway_assert(qmarkNode->gtOp.gtOp2->OperGet() == GT_COLON);
noway_assert(qmarkNode->gtOp.gtOp2->gtOp.gtOp1->OperGet() == GT_CALL);
- GenTreePtr runtimeLookupCall = qmarkNode->gtOp.gtOp2->gtOp.gtOp1;
+ GenTreeCall* runtimeLookupCall = qmarkNode->gtOp.gtOp2->gtOp.gtOp1->AsCall();
// This could be any of CORINFO_HELP_RUNTIMEHANDLE_(METHOD|CLASS)(_LOG?)
- GenTreePtr tokenNode = runtimeLookupCall->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
+ GenTreePtr tokenNode = runtimeLookupCall->gtCallArgs->gtOp.gtOp2->gtOp.gtOp1;
noway_assert(tokenNode->OperGet() == GT_CNS_INT);
targetMethodHnd = CORINFO_METHOD_HANDLE(tokenNode->gtIntCon.gtCompileTimeHandle);
}
if (oper == GT_FTN_ADDR)
{
// The first argument of the helper is delegate this pointer
- GenTreeArgList* helperArgs = gtNewArgList(call->gtCall.gtCallObjp);
+ GenTreeArgList* helperArgs = gtNewArgList(call->gtCallObjp);
CORINFO_CONST_LOOKUP entryPoint;
// The second argument of the helper is the target object pointers
- helperArgs->gtOp.gtOp2 = gtNewArgList(call->gtCall.gtCallArgs->gtOp.gtOp1);
+ helperArgs->gtOp.gtOp2 = gtNewArgList(call->gtCallArgs->gtOp.gtOp1);
call = gtNewHelperCallNode(CORINFO_HELP_READYTORUN_DELEGATE_CTOR, TYP_VOID, GTF_EXCEPT, helperArgs);
#if COR_JIT_EE_VERSION > 460
info.compCompHnd->getReadyToRunHelper(targetMethod->gtFptrVal.gtLdftnResolvedToken,
CORINFO_HELP_READYTORUN_DELEGATE_CTOR, &entryPoint);
#endif
- call->gtCall.setEntryPoint(entryPoint);
+ call->setEntryPoint(entryPoint);
}
}
else
// and in fact it will pass the wrong info to the inliner code
*ExactContextHnd = nullptr;
- call->gtCall.gtCallMethHnd = alternateCtor;
+ call->gtCallMethHnd = alternateCtor;
- noway_assert(call->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp2 == nullptr);
+ noway_assert(call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp2 == nullptr);
if (ctorData.pArg3)
{
- call->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp2 =
+ call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp2 =
gtNewArgList(gtNewIconHandleNode(size_t(ctorData.pArg3), GTF_ICON_FTN_ADDR));
if (ctorData.pArg4)
{
- call->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2 =
+ call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2 =
gtNewArgList(gtNewIconHandleNode(size_t(ctorData.pArg4), GTF_ICON_FTN_ADDR));
if (ctorData.pArg5)
{
- call->gtCall.gtCallArgs->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2 =
+ call->gtCallArgs->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2->gtOp.gtOp2 =
gtNewArgList(gtNewIconHandleNode(size_t(ctorData.pArg5), GTF_ICON_FTN_ADDR));
}
}
if ((treeFlags & GTF_EXCEPT) && !(chkFlags & GTF_EXCEPT))
{
- switch (eeGetHelperNum(tree->gtCall.gtCallMethHnd))
+ switch (eeGetHelperNum(call->gtCallMethHnd))
{
// Is this a helper call that can throw an exception ?
case CORINFO_HELP_LDIV:
// If there is non-NULL return, replace the GT_CALL with its return value expression,
// so later it will be picked up by the GT_RET_EXPR node.
- if ((pInlineInfo->inlineCandidateInfo->fncRetType != TYP_VOID) || (iciCall->gtCall.gtReturnType == TYP_STRUCT))
+ if ((pInlineInfo->inlineCandidateInfo->fncRetType != TYP_VOID) || (iciCall->gtReturnType == TYP_STRUCT))
{
noway_assert(pInlineInfo->retExpr);
#ifdef DEBUG
GenTreeStmt* postStmt = callStmt->gtNextStmt;
GenTreePtr afterStmt = callStmt; // afterStmt is the place where the new statements should be inserted after.
GenTreePtr newStmt = nullptr;
- GenTreePtr call = inlineInfo->iciCall;
+ GenTreeCall* call = inlineInfo->iciCall->AsCall();
noway_assert(call->gtOper == GT_CALL);
* that has the matching argNum and return the fgArgTableEntryPtr
*/
-fgArgTabEntryPtr Compiler::gtArgEntryByArgNum(GenTreePtr call, unsigned argNum)
+fgArgTabEntryPtr Compiler::gtArgEntryByArgNum(GenTreeCall* call, unsigned argNum)
{
- noway_assert(call->IsCall());
- fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
+ fgArgInfoPtr argInfo = call->fgArgInfo;
noway_assert(argInfo != nullptr);
unsigned argCount = argInfo->ArgCount();
* that has the matching node and return the fgArgTableEntryPtr
*/
-fgArgTabEntryPtr Compiler::gtArgEntryByNode(GenTreePtr call, GenTreePtr node)
+fgArgTabEntryPtr Compiler::gtArgEntryByNode(GenTreeCall* call, GenTreePtr node)
{
- noway_assert(call->IsCall());
- fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
+ fgArgInfoPtr argInfo = call->fgArgInfo;
noway_assert(argInfo != nullptr);
unsigned argCount = argInfo->ArgCount();
}
else // (curArgTabEntry->parent == NULL)
{
- if (call->gtCall.gtCallObjp == node)
+ if (call->gtCallObjp == node)
{
return curArgTabEntry;
}
* Find and return the entry with the given "lateArgInx". Requires that one is found
* (asserts this).
*/
-fgArgTabEntryPtr Compiler::gtArgEntryByLateArgIndex(GenTreePtr call, unsigned lateArgInx)
+fgArgTabEntryPtr Compiler::gtArgEntryByLateArgIndex(GenTreeCall* call, unsigned lateArgInx)
{
- noway_assert(call->IsCall());
- fgArgInfoPtr argInfo = call->gtCall.fgArgInfo;
+ fgArgInfoPtr argInfo = call->fgArgInfo;
noway_assert(argInfo != nullptr);
unsigned argCount = argInfo->ArgCount();
if (tree->gtCall.fgArgInfo)
{
// Create and initialize the fgArgInfo for our copy of the call tree
- copy->gtCall.fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy, tree);
+ copy->gtCall.fgArgInfo = new (this, CMK_Unknown) fgArgInfo(copy->AsCall(), tree->AsCall());
}
else
{
return false;
}
-GenTreePtr Compiler::gtGetThisArg(GenTreePtr call)
+GenTreePtr Compiler::gtGetThisArg(GenTreeCall* call)
{
- assert(call->gtOper == GT_CALL);
-
- if (call->gtCall.gtCallObjp != nullptr)
+ if (call->gtCallObjp != nullptr)
{
- if (call->gtCall.gtCallObjp->gtOper != GT_NOP && call->gtCall.gtCallObjp->gtOper != GT_ASG)
+ if (call->gtCallObjp->gtOper != GT_NOP && call->gtCallObjp->gtOper != GT_ASG)
{
- if (!(call->gtCall.gtCallObjp->gtFlags & GTF_LATE_ARG))
+ if (!(call->gtCallObjp->gtFlags & GTF_LATE_ARG))
{
- return call->gtCall.gtCallObjp;
+ return call->gtCallObjp;
}
}
- if (call->gtCall.gtCallLateArgs)
+ if (call->gtCallLateArgs)
{
regNumber thisReg = REG_ARG_0;
unsigned argNum = 0;
GenTreePtr result = thisArgTabEntry->node;
#if !FEATURE_FIXED_OUT_ARGS
- GenTreePtr lateArgs = call->gtCall.gtCallLateArgs;
- regList list = call->gtCall.regArgList;
+ GenTreePtr lateArgs = call->gtCallLateArgs;
+ regList list = call->regArgList;
int index = 0;
while (lateArgs != NULL)
{
assert(lateArgs->gtOper == GT_LIST);
- assert(index < call->gtCall.regArgListCount);
+ assert(index < call->regArgListCount);
regNumber curArgReg = list[index];
if (curArgReg == thisReg)
{
case GT_CALL:
{
- assert(tree->gtFlags & GTF_CALL);
- unsigned numChildren = tree->NumChildren();
+ GenTreeCall* call = tree->AsCall();
+ assert(call->gtFlags & GTF_CALL);
+ unsigned numChildren = call->NumChildren();
GenTree* lastChild = nullptr;
if (numChildren != 0)
{
- lastChild = tree->GetChild(numChildren - 1);
+ lastChild = call->GetChild(numChildren - 1);
}
- if (tree->gtCall.gtCallType != CT_INDIRECT)
+ if (call->gtCallType != CT_INDIRECT)
{
const char* methodName;
const char* className;
- methodName = eeGetMethodName(tree->gtCall.gtCallMethHnd, &className);
+ methodName = eeGetMethodName(call->gtCallMethHnd, &className);
printf(" %s.%s", className, methodName);
}
- if ((tree->gtFlags & GTF_CALL_UNMANAGED) && (tree->gtCall.gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
+ if ((call->gtFlags & GTF_CALL_UNMANAGED) && (call->gtCallMoreFlags & GTF_CALL_M_FRAME_VAR_DEATH))
{
printf(" (FramesRoot last use)");
}
- if (((tree->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (tree->gtCall.gtInlineCandidateInfo != nullptr) &&
- (tree->gtCall.gtInlineCandidateInfo->exactContextHnd != nullptr))
+ if (((call->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0) && (call->gtInlineCandidateInfo != nullptr) &&
+ (call->gtInlineCandidateInfo->exactContextHnd != nullptr))
{
- printf(" (exactContextHnd=0x%p)", dspPtr(tree->gtCall.gtInlineCandidateInfo->exactContextHnd));
+ printf(" (exactContextHnd=0x%p)", dspPtr(call->gtInlineCandidateInfo->exactContextHnd));
}
- gtDispVN(tree);
- if (tree->IsMultiRegCall())
+ gtDispVN(call);
+ if (call->IsMultiRegCall())
{
- gtDispRegVal(tree);
+ gtDispRegVal(call);
}
printf("\n");
bufp = &buf[0];
- if ((tree->gtCall.gtCallObjp != nullptr) && (tree->gtCall.gtCallObjp->gtOper != GT_NOP) &&
- (!tree->gtCall.gtCallObjp->IsArgPlaceHolderNode()))
+ if ((call->gtCallObjp != nullptr) && (call->gtCallObjp->gtOper != GT_NOP) &&
+ (!call->gtCallObjp->IsArgPlaceHolderNode()))
{
- if (tree->gtCall.gtCallObjp->gtOper == GT_ASG)
+ if (call->gtCallObjp->gtOper == GT_ASG)
{
sprintf_s(bufp, sizeof(buf), "this SETUP%c", 0);
}
{
sprintf_s(bufp, sizeof(buf), "this in %s%c", compRegVarName(REG_ARG_0), 0);
}
- gtDispChild(tree->gtCall.gtCallObjp, indentStack,
- (tree->gtCall.gtCallObjp == lastChild) ? IIArcBottom : IIArc, bufp, topOnly);
+ gtDispChild(call->gtCallObjp, indentStack, (call->gtCallObjp == lastChild) ? IIArcBottom : IIArc,
+ bufp, topOnly);
}
- if (tree->gtCall.gtCallArgs)
+ if (call->gtCallArgs)
{
- gtDispArgList(tree, indentStack);
+ gtDispArgList(call, indentStack);
}
- if (tree->gtCall.gtCallType == CT_INDIRECT)
+ if (call->gtCallType == CT_INDIRECT)
{
- gtDispChild(tree->gtCall.gtCallAddr, indentStack,
- (tree->gtCall.gtCallAddr == lastChild) ? IIArcBottom : IIArc, "calli tgt", topOnly);
+ gtDispChild(call->gtCallAddr, indentStack, (call->gtCallAddr == lastChild) ? IIArcBottom : IIArc,
+ "calli tgt", topOnly);
}
- if (tree->gtCall.gtControlExpr != nullptr)
+ if (call->gtControlExpr != nullptr)
{
- gtDispChild(tree->gtCall.gtControlExpr, indentStack,
- (tree->gtCall.gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr",
- topOnly);
+ gtDispChild(call->gtControlExpr, indentStack,
+ (call->gtControlExpr == lastChild) ? IIArcBottom : IIArc, "control expr", topOnly);
}
#if !FEATURE_FIXED_OUT_ARGS
- regList list = tree->gtCall.regArgList;
+ regList list = call->regArgList;
#endif
/* process the late argument list */
int lateArgIndex = 0;
- for (GenTreeArgList* lateArgs = tree->gtCall.gtCallLateArgs; lateArgs;
+ for (GenTreeArgList* lateArgs = call->gtCallLateArgs; lateArgs;
(lateArgIndex++, lateArgs = lateArgs->Rest()))
{
GenTreePtr argx;
argx = lateArgs->Current();
IndentInfo arcType = (lateArgs->Rest() == nullptr) ? IIArcBottom : IIArc;
- gtGetLateArgMsg(tree, argx, lateArgIndex, -1, bufp, sizeof(buf));
+ gtGetLateArgMsg(call, argx, lateArgIndex, -1, bufp, sizeof(buf));
gtDispChild(argx, indentStack, arcType, bufp, topOnly);
}
}
// 'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
void Compiler::gtGetArgMsg(
- GenTreePtr call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
+ GenTreeCall* call, GenTreePtr arg, unsigned argNum, int listCount, char* bufp, unsigned bufLength)
{
- if (call->gtCall.gtCallLateArgs != nullptr)
+ if (call->gtCallLateArgs != nullptr)
{
fgArgTabEntryPtr curArgTabEntry = gtArgEntryByArgNum(call, argNum);
assert(curArgTabEntry);
// 'arg' must be an argument to 'call' (else gtArgEntryByNode will assert)
void Compiler::gtGetLateArgMsg(
- GenTreePtr call, GenTreePtr argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
+ GenTreeCall* call, GenTreePtr argx, int lateArgIndex, int listCount, char* bufp, unsigned bufLength)
{
assert(!argx->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
regNumber argReg = curArgTabEntry->regNum;
#if !FEATURE_FIXED_OUT_ARGS
- assert(lateArgIndex < call->gtCall.regArgListCount);
- assert(argReg == call->gtCall.regArgList[lateArgIndex]);
+ assert(lateArgIndex < call->regArgListCount);
+ assert(argReg == call->regArgList[lateArgIndex]);
#else
if (argReg == REG_STK)
{
// gtDispArgList: Dump the tree for a call arg list
//
// Arguments:
-// tree - The call for which 'arg' is an argument
+// call - The call to dump arguments for
// indentStack - the specification for the current level of indentation & arcs
//
// Return Value:
// None.
//
-// Assumptions:
-// 'tree' must be a call node
-
-void Compiler::gtDispArgList(GenTreePtr tree, IndentStack* indentStack)
+void Compiler::gtDispArgList(GenTreeCall* call, IndentStack* indentStack)
{
- GenTree* args = tree->gtCall.gtCallArgs;
+ GenTree* args = call->gtCallArgs;
unsigned argnum = 0;
const int BufLength = 256;
char buf[BufLength];
char* bufp = &buf[0];
- unsigned numChildren = tree->NumChildren();
+ unsigned numChildren = call->NumChildren();
assert(numChildren != 0);
- bool argListIsLastChild = (args == tree->GetChild(numChildren - 1));
+ bool argListIsLastChild = (args == call->GetChild(numChildren - 1));
IndentInfo arcType = IIArc;
- if (tree->gtCall.gtCallObjp != nullptr)
+ if (call->gtCallObjp != nullptr)
{
argnum++;
}
GenTree* arg = args->gtOp.gtOp1;
if (!arg->IsNothingNode() && !arg->IsArgPlaceHolderNode())
{
- gtGetArgMsg(tree, arg, argnum, -1, bufp, BufLength);
+ gtGetArgMsg(call, arg, argnum, -1, bufp, BufLength);
if (argListIsLastChild && (args->gtOp.gtOp2 == nullptr))
{
arcType = IIArcBottom;
{
if (tree->gtCall.gtCallType == CT_HELPER)
{
- if (gtIsTypeHandleToRuntimeTypeHelper(tree))
+ if (gtIsTypeHandleToRuntimeTypeHelper(tree->AsCall()))
{
return true;
}
return false;
}
-bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreePtr tree)
+bool Compiler::gtIsTypeHandleToRuntimeTypeHelper(GenTreeCall* call)
{
- return tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
- tree->gtCall.gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
+ return call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE) ||
+ call->gtCallMethHnd == eeFindHelper(CORINFO_HELP_TYPEHANDLE_TO_RUNTIMETYPE_MAYBENULL);
}
bool Compiler::gtIsActiveCSE_Candidate(GenTreePtr tree)
var_types type,
unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots)
PUT_STRUCT_ARG_STK_ONLY_ARG(bool isStruct),
- bool _putInIncomingArgArea = false DEBUGARG(GenTreePtr callNode = nullptr)
+ bool _putInIncomingArgArea = false DEBUGARG(GenTreeCall* callNode = nullptr)
DEBUGARG(bool largeNode = false))
: GenTreeUnOp(oper, type DEBUGARG(largeNode))
, gtSlotNum(slotNum)
var_types type,
GenTreePtr op1,
unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots),
- bool _putInIncomingArgArea = false DEBUGARG(GenTreePtr callNode = nullptr)
+ bool _putInIncomingArgArea = false DEBUGARG(GenTreeCall* callNode = nullptr)
DEBUGARG(bool largeNode = false))
: GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode))
, gtSlotNum(slotNum)
GenTreePutArgStk(genTreeOps oper,
var_types type,
unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots)
- DEBUGARG(GenTreePtr callNode = NULL) DEBUGARG(bool largeNode = false))
+ DEBUGARG(GenTreeCall* callNode = NULL) DEBUGARG(bool largeNode = false))
: GenTreeUnOp(oper, type DEBUGARG(largeNode))
, gtSlotNum(slotNum)
#if defined(UNIX_X86_ABI)
var_types type,
GenTreePtr op1,
unsigned slotNum PUT_STRUCT_ARG_STK_ONLY_ARG(unsigned numSlots)
- DEBUGARG(GenTreePtr callNode = NULL) DEBUGARG(bool largeNode = false))
+ DEBUGARG(GenTreeCall* callNode = NULL) DEBUGARG(bool largeNode = false))
: GenTreeUnOp(oper, type, op1 DEBUGARG(largeNode))
, gtSlotNum(slotNum)
#if defined(UNIX_X86_ABI)
#endif // FEATURE_PUT_STRUCT_ARG_STK
#ifdef DEBUG
- GenTreePtr gtCall; // the call node to which this argument belongs
+ GenTreeCall* gtCall; // the call node to which this argument belongs
#endif
#ifdef FEATURE_PUT_STRUCT_ARG_STK
}
else if (src->gtOper == GT_RET_EXPR)
{
- GenTreePtr call = src->gtRetExpr.gtInlineCandidate;
+ GenTreeCall* call = src->gtRetExpr.gtInlineCandidate->AsCall();
noway_assert(call->gtOper == GT_CALL);
- if (call->AsCall()->HasRetBufArg())
+ if (call->HasRetBufArg())
{
// insert the return value buffer into the argument list as first byref parameter
- call->gtCall.gtCallArgs = gtNewListNode(destAddr, call->gtCall.gtCallArgs);
+ call->gtCallArgs = gtNewListNode(destAddr, call->gtCallArgs);
// now returns void, not a struct
src->gtType = TYP_VOID;
{
// Case of inline method returning a struct in one or more registers.
//
- var_types returnType = (var_types)call->gtCall.gtReturnType;
+ var_types returnType = (var_types)call->gtReturnType;
// We won't need a return buffer
asgType = returnType;
return gtNewIconEmbHndNode(handle, pIndirection, handleFlags, 0, nullptr, compileTimeHandle);
}
-GenTreePtr Compiler::impReadyToRunHelperToTree(
+GenTreeCall* Compiler::impReadyToRunHelperToTree(
CORINFO_RESOLVED_TOKEN* pResolvedToken,
CorInfoHelpFunc helper,
var_types type,
info.compCompHnd->getReadyToRunHelper(pResolvedToken, helper, &lookup);
#endif
- GenTreePtr op1 = gtNewHelperCallNode(helper, type, GTF_EXCEPT, args);
+ GenTreeCall* op1 = gtNewHelperCallNode(helper, type, GTF_EXCEPT, args);
- op1->gtCall.setEntryPoint(lookup);
+ op1->setEntryPoint(lookup);
return op1;
}
case CORINFO_INTRINSIC_GetTypeFromHandle:
op1 = impStackTop(0).val;
if (op1->gtOper == GT_CALL && (op1->gtCall.gtCallType == CT_HELPER) &&
- gtIsTypeHandleToRuntimeTypeHelper(op1))
+ gtIsTypeHandleToRuntimeTypeHelper(op1->AsCall()))
{
op1 = impPopStack().val;
// Change call to return RuntimeType directly.
case CORINFO_INTRINSIC_RTH_GetValueInternal:
op1 = impStackTop(0).val;
if (op1->gtOper == GT_CALL && (op1->gtCall.gtCallType == CT_HELPER) &&
- gtIsTypeHandleToRuntimeTypeHelper(op1))
+ gtIsTypeHandleToRuntimeTypeHelper(op1->AsCall()))
{
// Old tree
// Helper-RuntimeTypeHandle -> TreeToGetNativeTypeHandle
// If GTF_CALL_UNMANAGED is set, increments info.compCallUnmanaged
void Compiler::impCheckForPInvokeCall(
- GenTreePtr call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block)
+ GenTreeCall* call, CORINFO_METHOD_HANDLE methHnd, CORINFO_SIG_INFO* sig, unsigned mflags, BasicBlock* block)
{
CorInfoUnmanagedCallConv unmanagedCallConv;
// If VM flagged it as Pinvoke, flag the call node accordingly
if ((mflags & CORINFO_FLG_PINVOKE) != 0)
{
- call->gtCall.gtCallMoreFlags |= GTF_CALL_M_PINVOKE;
+ call->gtCallMoreFlags |= GTF_CALL_M_PINVOKE;
}
if (methHnd)
static_assert_no_msg((unsigned)CORINFO_CALLCONV_THISCALL == (unsigned)CORINFO_UNMANAGED_CALLCONV_THISCALL);
unmanagedCallConv = CorInfoUnmanagedCallConv(callConv);
- assert(!call->gtCall.gtCallCookie);
+ assert(!call->gtCallCookie);
}
if (unmanagedCallConv != CORINFO_UNMANAGED_CALLCONV_C && unmanagedCallConv != CORINFO_UNMANAGED_CALLCONV_STDCALL &&
if (unmanagedCallConv == CORINFO_UNMANAGED_CALLCONV_THISCALL)
{
- call->gtCall.gtCallMoreFlags |= GTF_CALL_M_UNMGD_THISCALL;
+ call->gtCallMoreFlags |= GTF_CALL_M_UNMGD_THISCALL;
}
}
-GenTreePtr Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX ilOffset)
+GenTreeCall* Compiler::impImportIndirectCall(CORINFO_SIG_INFO* sig, IL_OFFSETX ilOffset)
{
var_types callRetTyp = JITtype2varType(sig->retType);
/* Create the call node */
- GenTreePtr call = gtNewIndCallNode(fptr, callRetTyp, nullptr, ilOffset);
+ GenTreeCall* call = gtNewIndCallNode(fptr, callRetTyp, nullptr, ilOffset);
call->gtFlags |= GTF_EXCEPT | (fptr->gtFlags & GTF_GLOB_EFFECT);
{
// New lexical block here to avoid compilation errors because of GOTOs.
BasicBlock* block = compIsForInlining() ? impInlineInfo->iciBlock : compCurBB;
- impCheckForPInvokeCall(call, methHnd, sig, mflags, block);
+ impCheckForPInvokeCall(call->AsCall(), methHnd, sig, mflags, block);
}
if (call->gtFlags & GTF_CALL_UNMANAGED)
{
// New inliner morph it in impImportCall.
// This will allow us to inline the call to the delegate constructor.
- call = fgOptimizeDelegateConstructor(call, &exactContextHnd);
+ call = fgOptimizeDelegateConstructor(call->AsCall(), &exactContextHnd);
}
if (!bIntrinsicImported)
bool fatPointerCandidate = call->AsCall()->IsFatPointerCandidate();
if (varTypeIsStruct(callRetTyp))
{
- call = impFixupCallStructReturn(call, sig->retTypeClass);
+ call = impFixupCallStructReturn(call->AsCall(), sig->retTypeClass);
}
if ((call->gtFlags & GTF_CALL_INLINE_CANDIDATE) != 0)
// Return Value:
// Returns new GenTree node after fixing struct return of call node
//
-GenTreePtr Compiler::impFixupCallStructReturn(GenTreePtr call, CORINFO_CLASS_HANDLE retClsHnd)
+GenTreePtr Compiler::impFixupCallStructReturn(GenTreeCall* call, CORINFO_CLASS_HANDLE retClsHnd)
{
- assert(call->gtOper == GT_CALL);
-
if (!varTypeIsStruct(call))
{
return call;
}
- call->gtCall.gtRetClsHnd = retClsHnd;
-
- GenTreeCall* callNode = call->AsCall();
+ call->gtRetClsHnd = retClsHnd;
#if FEATURE_MULTIREG_RET
// Initialize Return type descriptor of call node
- ReturnTypeDesc* retTypeDesc = callNode->GetReturnTypeDesc();
+ ReturnTypeDesc* retTypeDesc = call->GetReturnTypeDesc();
retTypeDesc->InitializeStructReturnType(this, retClsHnd);
#endif // FEATURE_MULTIREG_RET
#ifdef FEATURE_UNIX_AMD64_STRUCT_PASSING
// Not allowed for FEATURE_CORCLR which is the only SKU available for System V OSs.
- assert(!callNode->IsVarargs() && "varargs not allowed for System V OSs.");
+ assert(!call->IsVarargs() && "varargs not allowed for System V OSs.");
// The return type will remain as the incoming struct type unless normalized to a
// single eightbyte return type below.
- callNode->gtReturnType = call->gtType;
+ call->gtReturnType = call->gtType;
unsigned retRegCount = retTypeDesc->GetReturnRegCount();
if (retRegCount != 0)
if (retRegCount == 1)
{
// struct returned in a single register
- callNode->gtReturnType = retTypeDesc->GetReturnRegType(0);
+ call->gtReturnType = retTypeDesc->GetReturnRegType(0);
}
else
{
// must be a struct returned in two registers
assert(retRegCount == 2);
- if ((!callNode->CanTailCall()) && (!callNode->IsInlineCandidate()))
+ if ((!call->CanTailCall()) && (!call->IsInlineCandidate()))
{
// Force a call returning multi-reg struct to be always of the IR form
// tmp = call
else
{
// struct not returned in registers i.e returned via hiddden retbuf arg.
- callNode->gtCallMoreFlags |= GTF_CALL_M_RETBUFFARG;
+ call->gtCallMoreFlags |= GTF_CALL_M_RETBUFFARG;
}
#else // not FEATURE_UNIX_AMD64_STRUCT_PASSING
// There is no fixup necessary if the return type is a HFA struct.
// HFA structs are returned in registers for ARM32 and ARM64
//
- if (!call->gtCall.IsVarargs() && IsHfa(retClsHnd))
+ if (!call->IsVarargs() && IsHfa(retClsHnd))
{
- if (call->gtCall.CanTailCall())
+ if (call->CanTailCall())
{
if (info.compIsVarArgs)
{
// We cannot tail call because control needs to return to fixup the calling
// convention for result return.
- call->gtCall.gtCallMoreFlags &= ~GTF_CALL_M_EXPLICIT_TAILCALL;
+ call->gtCallMoreFlags &= ~GTF_CALL_M_EXPLICIT_TAILCALL;
}
else
{
if (howToReturnStruct == SPK_ByReference)
{
assert(returnType == TYP_UNKNOWN);
- call->gtCall.gtCallMoreFlags |= GTF_CALL_M_RETBUFFARG;
+ call->gtCallMoreFlags |= GTF_CALL_M_RETBUFFARG;
}
else
{
assert(returnType != TYP_UNKNOWN);
- call->gtCall.gtReturnType = returnType;
+ call->gtReturnType = returnType;
// ToDo: Refactor this common code sequence into its own method as it is used 4+ times
if ((returnType == TYP_LONG) && (compLongUsed == false))
if (retRegCount >= 2)
{
- if ((!callNode->CanTailCall()) && (!callNode->IsInlineCandidate()))
+ if ((!call->CanTailCall()) && (!call->IsInlineCandidate()))
{
// Force a call returning multi-reg struct to be always of the IR form
// tmp = call
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
{
- GenTreePtr opLookup =
+ GenTreeCall* opLookup =
impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_ISINSTANCEOF, TYP_REF,
gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
#ifdef FEATURE_READYTORUN_COMPILER
if (opts.IsReadyToRun())
{
- GenTreePtr opLookup = impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_CHKCAST,
- TYP_REF, gtNewArgList(op1));
+ GenTreeCall* opLookup = impReadyToRunHelperToTree(&resolvedToken, CORINFO_HELP_READYTORUN_CHKCAST,
+ TYP_REF, gtNewArgList(op1));
usingReadyToRunHelper = (opLookup != nullptr);
op1 = (usingReadyToRunHelper ? opLookup : op1);
// compRetNativeType is TYP_STRUCT.
// This implies that struct return via RetBuf arg or multi-reg struct return
- GenTreePtr iciCall = impInlineInfo->iciCall;
- assert(iciCall->gtOper == GT_CALL);
+ GenTreeCall* iciCall = impInlineInfo->iciCall->AsCall();
// Assign the inlinee return into a spill temp.
// spill temp only exists if there are multiple return points
if (retRegCount != 0)
{
- assert(!iciCall->AsCall()->HasRetBufArg());
+ assert(!iciCall->HasRetBufArg());
assert(retRegCount >= 2);
if (lvaInlineeReturnSpillTemp != BAD_VAR_NUM)
{
else
#endif // defined(_TARGET_ARM64_)
{
- assert(iciCall->AsCall()->HasRetBufArg());
- GenTreePtr dest = gtCloneExpr(iciCall->gtCall.gtCallArgs->gtOp.gtOp1);
+ assert(iciCall->HasRetBufArg());
+ GenTreePtr dest = gtCloneExpr(iciCall->gtCallArgs->gtOp.gtOp1);
// spill temp only exists if there are multiple return points
if (lvaInlineeReturnSpillTemp != BAD_VAR_NUM)
{
*/
// clang-format off
-void CodeGen::instEmit_indCall(GenTreePtr call,
+void CodeGen::instEmit_indCall(GenTreeCall* call,
size_t argSize,
emitAttr retSize
MULTIREG_HAS_SECOND_GC_RET_ONLY_ARG(emitAttr secondRetSize))
CORINFO_SIG_INFO* sigInfo = nullptr;
- assert(call->gtOper == GT_CALL);
-
/* Get hold of the function address */
- assert(call->gtCall.gtCallType == CT_INDIRECT);
- addr = call->gtCall.gtCallAddr;
+ assert(call->gtCallType == CT_INDIRECT);
+ addr = call->gtCallAddr;
assert(addr);
#ifdef DEBUG
// Pass the call signature information from the GenTree node so the emitter can associate
// native call sites with the signatures they were generated from.
- sigInfo = call->gtCall.callSig;
+ sigInfo = call->callSig;
#endif // DEBUG
#if CPU_LOAD_STORE_ARCH
INDEBUG(bool yes =)
genCreateAddrMode(addr, -1, true, RBM_NONE, &rev, &rv1, &rv2, &mul, &cns);
- INDEBUG(PREFIX_ASSUME(yes)); // since we have called genMakeAddressable() on call->gtCall.gtCallAddr
+ INDEBUG(PREFIX_ASSUME(yes)); // since we have called genMakeAddressable() on call->gtCallAddr
/* Get the additional operands if any */
}
#endif
-fgArgInfo::fgArgInfo(Compiler* comp, GenTreePtr call, unsigned numArgs)
+fgArgInfo::fgArgInfo(Compiler* comp, GenTreeCall* call, unsigned numArgs)
{
- compiler = comp;
- callTree = call;
- assert(call->IsCall());
+ compiler = comp;
+ callTree = call;
argCount = 0; // filled in arg count, starts at zero
nextSlotNum = INIT_ARG_STACK_SLOT;
stkLevel = 0;
* in the argTable contains pointers that must point to the
* new arguments and not the old arguments.
*/
-fgArgInfo::fgArgInfo(GenTreePtr newCall, GenTreePtr oldCall)
+fgArgInfo::fgArgInfo(GenTreeCall* newCall, GenTreeCall* oldCall)
{
- assert(oldCall->IsCall());
- assert(newCall->IsCall());
-
- fgArgInfoPtr oldArgInfo = oldCall->gtCall.fgArgInfo;
+ fgArgInfoPtr oldArgInfo = oldCall->fgArgInfo;
- compiler = oldArgInfo->compiler;
- ;
- callTree = newCall;
- assert(newCall->IsCall());
+ compiler = oldArgInfo->compiler;
+ callTree = newCall;
argCount = 0; // filled in arg count, starts at zero
nextSlotNum = INIT_ARG_STACK_SLOT;
stkLevel = oldArgInfo->stkLevel;
// so we can iterate over these argument lists more uniformly.
// Need to provide a temporary non-null first arguments to these constructors: if we use them, we'll replace them
GenTreeArgList* newArgs;
- GenTreeArgList newArgObjp(newCall, newCall->gtCall.gtCallArgs);
+ GenTreeArgList newArgObjp(newCall, newCall->gtCallArgs);
GenTreeArgList* oldArgs;
- GenTreeArgList oldArgObjp(oldCall, oldCall->gtCall.gtCallArgs);
+ GenTreeArgList oldArgObjp(oldCall, oldCall->gtCallArgs);
- if (newCall->gtCall.gtCallObjp == nullptr)
+ if (newCall->gtCallObjp == nullptr)
{
- assert(oldCall->gtCall.gtCallObjp == nullptr);
- newArgs = newCall->gtCall.gtCallArgs;
- oldArgs = oldCall->gtCall.gtCallArgs;
+ assert(oldCall->gtCallObjp == nullptr);
+ newArgs = newCall->gtCallArgs;
+ oldArgs = oldCall->gtCallArgs;
}
else
{
- assert(oldCall->gtCall.gtCallObjp != nullptr);
- newArgObjp.Current() = newCall->gtCall.gtCallArgs;
+ assert(oldCall->gtCallObjp != nullptr);
+ newArgObjp.Current() = newCall->gtCallArgs;
newArgs = &newArgObjp;
- oldArgObjp.Current() = oldCall->gtCall.gtCallObjp;
+ oldArgObjp.Current() = oldCall->gtCallObjp;
oldArgs = &oldArgObjp;
}
if (scanRegArgs)
{
- newArgs = newCall->gtCall.gtCallLateArgs;
- oldArgs = oldCall->gtCall.gtCallLateArgs;
+ newArgs = newCall->gtCallLateArgs;
+ oldArgs = oldCall->gtCallLateArgs;
while (newArgs)
{
#pragma warning(push)
#pragma warning(disable : 21000) // Suppress PREFast warning about overly large function
#endif
-GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* callNode)
+GenTreeCall* Compiler::fgMorphArgs(GenTreeCall* call)
{
- GenTreeCall* call = callNode->AsCall();
-
GenTreePtr args;
GenTreePtr argx;
GenTreePtr pGetType;
#ifdef LEGACY_BACKEND
- bool bOp1ClassFromHandle = gtIsTypeHandleToRuntimeTypeHelper(op1);
- bool bOp2ClassFromHandle = gtIsTypeHandleToRuntimeTypeHelper(op2);
+ bool bOp1ClassFromHandle = gtIsTypeHandleToRuntimeTypeHelper(op1->AsCall());
+ bool bOp2ClassFromHandle = gtIsTypeHandleToRuntimeTypeHelper(op2->AsCall());
#else
- bool bOp1ClassFromHandle = op1->gtOper == GT_CALL ? gtIsTypeHandleToRuntimeTypeHelper(op1) : false;
- bool bOp2ClassFromHandle = op2->gtOper == GT_CALL ? gtIsTypeHandleToRuntimeTypeHelper(op2) : false;
+ bool bOp1ClassFromHandle =
+ op1->gtOper == GT_CALL ? gtIsTypeHandleToRuntimeTypeHelper(op1->AsCall()) : false;
+ bool bOp2ClassFromHandle =
+ op2->gtOper == GT_CALL ? gtIsTypeHandleToRuntimeTypeHelper(op2->AsCall()) : false;
#endif
// Optimize typeof(...) == typeof(...)
* Determine the kind of interference for the call.
*/
-/* static */ inline Compiler::callInterf Compiler::optCallInterf(GenTreePtr call)
+/* static */ inline Compiler::callInterf Compiler::optCallInterf(GenTreeCall* call)
{
- assert(call->gtOper == GT_CALL);
-
// if not a helper, kills everything
- if (call->gtCall.gtCallType != CT_HELPER)
+ if (call->gtCallType != CT_HELPER)
{
return CALLINT_ALL;
}
// setfield and array address store kill all indirections
- switch (eeGetHelperNum(call->gtCall.gtCallMethHnd))
+ switch (eeGetHelperNum(call->gtCallMethHnd))
{
case CORINFO_HELP_ASSIGN_REF: // Not strictly needed as we don't make a GT_CALL with this
case CORINFO_HELP_CHECKED_ASSIGN_REF: // Not strictly needed as we don't make a GT_CALL with this
isVarAssgDsc* desc = (isVarAssgDsc*)data->pCallbackData;
assert(desc && desc->ivaSelf == desc);
- desc->ivaMaskCall = optCallInterf(tree);
+ desc->ivaMaskCall = optCallInterf(tree->AsCall());
}
return WALK_CONTINUE;
assert(!args->IsArgPlaceHolderNode()); // No place holders nodes are in gtCallLateArgs;
- fgArgTabEntryPtr curArgTabEntry = gtArgEntryByNode(tree, args);
+ fgArgTabEntryPtr curArgTabEntry = gtArgEntryByNode(tree->AsCall(), args);
assert(curArgTabEntry);
regNumber regNum = curArgTabEntry->regNum; // first register use to pass this argument
else
{
assert(oper == GT_CALL);
- genCodeForCall(tree, true);
+ genCodeForCall(tree->AsCall(), true);
}
}
*
* Spill the top of the FP x87 stack.
*/
-void RegSet::rsSpillFPStack(GenTreePtr tree)
+void RegSet::rsSpillFPStack(GenTreeCall* call)
{
SpillDsc* spill;
TempDsc* temp;
- var_types treeType = tree->TypeGet();
+ var_types treeType = call->TypeGet();
- assert(tree->OperGet() == GT_CALL);
spill = SpillDsc::alloc(m_rsCompiler, this, treeType);
/* Grab a temp to store the spilled value */
/* Remember what it is we have spilled */
- spill->spillTree = tree;
+ spill->spillTree = call;
SpillDsc* lastDsc = spill;
- regNumber reg = tree->gtRegNum;
+ regNumber reg = call->gtRegNum;
lastDsc->spillNext = rsSpillDesc[reg];
rsSpillDesc[reg] = spill;
/* Mark the tree node as having been spilled */
- rsMarkSpill(tree, reg);
+ rsMarkSpill(call, reg);
}
#endif // defined(_TARGET_X86_) && !FEATURE_STACK_FP_X87
void rsSpillTree(regNumber reg, GenTreePtr tree, unsigned regIdx = 0);
#if defined(_TARGET_X86_) && !FEATURE_STACK_FP_X87
- void rsSpillFPStack(GenTreePtr tree);
+ void rsSpillFPStack(GenTreeCall* call);
#endif // defined(_TARGET_X86_) && !FEATURE_STACK_FP_X87
#ifdef LEGACY_BACKEND
{
case GT_CALL:
{
- genCodeForCall(tree, true);
+ genCodeForCall(tree->AsCall(), true);
break;
}
default: