* Fix another dynamically-sized allocation so that allocation and deallocation match: replace the mismatched pairing of `new BYTE[]` with scalar `delete` by a matched scalar `new`/`delete` (via a class-specific sized `operator new`).
* Fix use-after-scope: `retTypeDesc` was declared inside the `else` block but its address escaped via `pRetTypeDesc` and was dereferenced after the block ended; hoist it to the enclosing scope (`nonCallRetTypeDesc`).
* Fix another alloc-dealloc mismatch: memory allocated with `new[]` must be released with `delete[]`, not scalar `delete`.
* Update src/coreclr/vm/threadstatics.cpp
Co-authored-by: Jan Kotas <jkotas@microsoft.com>
* Use standard size_t instead of custom SIZE_T typedef.
* Fix formatting.
Co-authored-by: Jan Kotas <jkotas@microsoft.com>
noway_assert(op1->IsMultiRegCall() || op1->IsMultiRegLclVar());
int srcCount;
+ ReturnTypeDesc nonCallRetTypeDesc;
const ReturnTypeDesc* pRetTypeDesc;
if (op1->OperIs(GT_CALL))
{
else
{
assert(compiler->lvaEnregMultiRegVars);
- LclVarDsc* varDsc = compiler->lvaGetDesc(op1->AsLclVar()->GetLclNum());
- ReturnTypeDesc retTypeDesc;
- retTypeDesc.InitializeStructReturnType(compiler, varDsc->GetStructHnd(),
- compiler->info.compCallConv);
- pRetTypeDesc = &retTypeDesc;
+ LclVarDsc* varDsc = compiler->lvaGetDesc(op1->AsLclVar()->GetLclNum());
+ nonCallRetTypeDesc.InitializeStructReturnType(compiler, varDsc->GetStructHnd(),
+ compiler->info.compCallConv);
+ pRetTypeDesc = &nonCallRetTypeDesc;
assert(compiler->lvaGetDesc(op1->AsLclVar()->GetLclNum())->lvFieldCnt ==
- retTypeDesc.GetReturnRegCount());
+ nonCallRetTypeDesc.GetReturnRegCount());
}
srcCount = pRetTypeDesc->GetReturnRegCount();
// For any source that's coming from a different register file, we need to ensure that
SpinLock::Holder lock(&m_TLMTableLock);
// Free the table itself
- delete m_pTLMTable;
+ delete[] m_pTLMTable;
m_pTLMTable = NULL;
}
// If this allocation fails, we will throw. If it succeeds,
// then we are good to go
- PTR_TLMTableEntry pNewModuleSlots = (PTR_TLMTableEntry) (void*) new BYTE[sizeof(TLMTableEntry) * aModuleIndices];
+ PTR_TLMTableEntry pNewModuleSlots = new TLMTableEntry[aModuleIndices];
// Zero out the new TLM table
memset(pNewModuleSlots, 0 , sizeof(TLMTableEntry) * aModuleIndices);
SIZE_T size = pModule->GetThreadLocalModuleSize();
- _ASSERTE(size >= ThreadLocalModule::OffsetOfDataBlob());
-
- PTR_ThreadLocalModule pThreadLocalModule = (ThreadLocalModule*)new BYTE[size];
+ PTR_ThreadLocalModule pThreadLocalModule = new({ pModule }) ThreadLocalModule;
// We guarantee alignment for 64-bit regular thread statics on 32-bit platforms even without FEATURE_64BIT_ALIGNMENT for performance reasons.
return GetPrecomputedStaticsClassData()[classID] & ClassInitFlags::INITIALIZED_FLAG;
}
+ // Plain operator new is deleted: the object's real allocation size is
+ // dynamic (it depends on the owning Module), so callers must go through
+ // the placement form below, which computes the size from the Module.
+ void* operator new(size_t) = delete;
+
+ // Small carrier struct for the owning Module; exists so the sized
+ // operator new overload below can be selected via new({ pModule }) T.
+ struct ParentModule { PTR_Module pModule; };
+
+ // Sized allocation: asks the owning Module for the full object size
+ // (base object plus, presumably, the trailing statics data blob —
+ // see the OffsetOfDataBlob assert) and allocates that many bytes.
+ void* operator new(size_t baseSize, ParentModule parentModule)
+ {
+ size_t size = parentModule.pModule->GetThreadLocalModuleSize();
+
+ // The dynamic size must cover at least the C++ object itself and
+ // must reach the start of the trailing data blob.
+ _ASSERTE(size >= baseSize);
+ _ASSERTE(size >= ThreadLocalModule::OffsetOfDataBlob());
+
+ return ::operator new(size);
+ }
+
#ifndef DACCESS_COMPILE
FORCEINLINE void EnsureClassAllocated(MethodTable * pMT)