// aren't any embedded buffers in the DebuggerIPCControlBlock).
#if defined(DBG_TARGET_X86) || defined(DBG_TARGET_ARM)
+#ifdef _WIN64
+#define CorDBIPC_BUFFER_SIZE (2096)
+#else
#define CorDBIPC_BUFFER_SIZE (2088) // hand tuned to ensure that ipc block in IPCHeader.h fits in 1 page.
+#endif
#else // !_TARGET_X86_ && !_TARGET_ARM_
// This is the size of a DebuggerIPCEvent. You will hit an assert in Cordb::Initialize() (DI\process.cpp)
// if this is not defined correctly. AMD64 actually has a page size of 0x1000, not 0x2000.
ArmVolatileContextPointer volatileCurrContextPointers;
DWORD * pPC; // processor neutral name
-
+#ifndef CROSSGEN_COMPILE
REGDISPLAY()
{
// Initialize regdisplay
// Setup the pointer to ControlPC field
pPC = &ControlPC;
}
+#else
+private:
+ REGDISPLAY();
+#endif
};
// This function tells us if the given stack pointer is in one of the frames of the functions called by the given frame
#ifndef ALLOC_ALIGN_CONSTANT
-#define ALLOC_ALIGN_CONSTANT ((1<<LOG2_PTRSIZE)-1)
+#define ALLOC_ALIGN_CONSTANT (sizeof(void*)-1)
#endif
// as large as the largest FieldMarshaler subclass. This requirement
// is guarded by an assert.
//=======================================================================
+#ifdef _WIN64
+#define MAXFIELDMARSHALERSIZE 40
+#else
#define MAXFIELDMARSHALERSIZE 24
+#endif
//**********************************************************************
// Parameter size
return (sizeofvaluetype > 4);
}
+#ifdef _MSC_VER
+#pragma warning(push)
+#pragma warning(disable:4359) // Prevent "warning C4359: 'UMEntryThunkCode': Alignment specifier is less than actual alignment (8), and will be ignored." in crossbitness scenario
+#endif // _MSC_VER
+
struct DECLSPEC_ALIGN(4) UMEntryThunkCode
{
WORD m_code[4];
}
};
+#ifdef _MSC_VER
+#pragma warning(pop)
+#endif // _MSC_VER
+
struct HijackArgs
{
union
EXTERN_C VOID STDCALL PrecodeFixupThunk();
-#define PRECODE_ALIGNMENT CODE_SIZE_ALIGN
+#define PRECODE_ALIGNMENT sizeof(void*)
#define SIZEOF_PRECODE_BASE CODE_SIZE_ALIGN
#define OFFSETOF_PRECODE_TYPE 0
ThumbEmitEpilog();
}
+#ifndef CROSSGEN_COMPILE
+
void StubLinkerCPU::ThumbEmitCallManagedMethod(MethodDesc *pMD, bool fTailcall)
{
bool isRelative = MethodTable::VTableIndir2_t::isRelative
}
}
-#ifndef CROSSGEN_COMPILE
// Common code used to generate either an instantiating method stub or an unboxing stub (in the case where the
// unboxing stub also needs to provide a generic instantiation parameter). The stub needs to add the
// instantiation parameter provided in pHiddenArg and re-arrange the rest of the incoming arguments as a
}
#endif // !DACCESS_COMPILE
-#endif // !CROSSGEN_COMPILE
// Refresh the caller-supplied register display from this frame's saved
// context by delegating to SyncRegDisplayToCurrentContext.
// NOTE(review): only this single call is visible here; the enclosing
// FEATURE_HIJACK / !CROSSGEN_COMPILE region is outside this view, and the
// semantics of the sync helper are assumed from its name — confirm against
// its definition.
void FaultingExceptionFrame::UpdateRegDisplay(const PREGDISPLAY pRD)
{
SyncRegDisplayToCurrentContext(pRD);
}
-#endif
+#endif // FEATURE_HIJACK
+#endif // !CROSSGEN_COMPILE
class UMEntryThunk * UMEntryThunk::Decode(void *pCallback)
{
// Doubles or HFAs containing doubles need the stack aligned appropriately.
if (fRequiresAlign64Bit)
- m_idxStack = ALIGN_UP(m_idxStack, 2);
+ m_idxStack = (int)ALIGN_UP(m_idxStack, 2);
// Indicate the stack location of the argument to the caller.
int argOfs = TransitionBlock::GetOffsetOfArgs() + m_idxStack * 4;
{
// The argument requires 64-bit alignment. Align either the next general argument register if
// we have any left. See step C.3 in the algorithm in the ABI spec.
- m_idxGenReg = ALIGN_UP(m_idxGenReg, 2);
+ m_idxGenReg = (int)ALIGN_UP(m_idxGenReg, 2);
}
int argOfs = TransitionBlock::GetOffsetOfArgumentRegisters() + m_idxGenReg * 4;
{
// The argument requires 64-bit alignment. If it is going to be passed on the stack, align
// the next stack slot. See step C.6 in the algorithm in the ABI spec.
- m_idxStack = ALIGN_UP(m_idxStack, 2);
+ m_idxStack = (int)ALIGN_UP(m_idxStack, 2);
}
int argOfs = TransitionBlock::GetOffsetOfArgs() + m_idxStack * 4;
#include "clrvarargs.h" /* for VARARG C_ASSERTs in asmconstants.h */
// Compile-time-only class: including asmconstants.h with the C_ASSERT macro
// mapped to static_assert verifies that every asm-visible offset/constant
// matches the C++ layout. The checks are skipped when cross-compiling for a
// different pointer size (CROSSBITNESS_COMPILE), since the host layout would
// not match the target's.
class CheckAsmOffsets
{
#ifndef CROSSBITNESS_COMPILE
#define ASMCONSTANTS_C_ASSERT(cond) static_assert(cond, #cond);
#include "asmconstants.h"
#endif // CROSSBITNESS_COMPILE
};
//-------------------------------------------------------------------------------
*pUnwindCodes++ = (BYTE)0xFF; // end
}
- int epilogUnwindCodeIndex = 0;
+ ptrdiff_t epilogUnwindCodeIndex = 0;
//epilog differs from prolog
if(m_cbStackFrame >= 4096)
}
// Number of 32-bit unwind codes
- int codeWordsCount = (ALIGN_UP((size_t)pUnwindCodes, sizeof(void*)) - (size_t)pUnwindInfo - sizeof(DWORD))/4;
+ size_t codeWordsCount = (ALIGN_UP((size_t)pUnwindCodes, sizeof(void*)) - (size_t)pUnwindInfo - sizeof(DWORD))/4;
_ASSERTE(epilogUnwindCodeIndex < 32);
*(DWORD *)pUnwindInfo =
((functionLength) / 2) |
(1 << 21) |
- (epilogUnwindCodeIndex << 23)|
- (codeWordsCount << 28);
+ ((int)epilogUnwindCodeIndex << 23)|
+ ((int)codeWordsCount << 28);
#elif defined(_TARGET_ARM64_)
if (!m_fProlog)