// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// File: METHODTABLEBUILDER.CPP
//
//
//
// ============================================================================
#include "common.h"
#include "methodtablebuilder.h"
#include "sigbuilder.h"
#include "dllimport.h"
#include "fieldmarshaler.h"
#include "encee.h"
#include "mdaassistants.h"
#include "ecmakey.h"
#include "customattribute.h"
#include "typestring.h"
//*******************************************************************************
// Helper functions to sort GCDescs by offset (descending order)
int __cdecl compareCGCDescSeries(const void *arg1, const void *arg2)
{
STATIC_CONTRACT_NOTHROW;
STATIC_CONTRACT_GC_NOTRIGGER;
STATIC_CONTRACT_FORBID_FAULT;
CGCDescSeries* gcInfo1 = (CGCDescSeries*) arg1;
CGCDescSeries* gcInfo2 = (CGCDescSeries*) arg2;
return (int)(gcInfo2->GetSeriesOffset() - gcInfo1->GetSeriesOffset());
}
//*******************************************************************************
const char* FormatSig(MethodDesc* pMD, LoaderHeap *pHeap, AllocMemTracker *pamTracker);
#ifdef _DEBUG
unsigned g_dupMethods = 0;
#endif // _DEBUG
//==========================================================================
// This function is very specific about how it constructs an EEClass. It first
// determines the necessary size of the vtable and the number of statics that
// this class requires. The necessary memory is then allocated for an EEClass
// and its vtable and statics. The class members are then initialized and
// the memory is returned to the caller.
//
// LPEEClass CreateClass()
//
// Parameters :
// [in] scope - scope of the current class, not the one requested to be opened
// [in] cl - class token of the class to be created.
// [out] ppEEClass - pointer to pointer to hold the address of the EEClass
// allocated in this function.
// Return : returns an HRESULT indicating the success of this function.
//
// This parameter has been removed but might need to be reinstated if the
// global for the metadata loader is removed.
// [in] pIMLoad - MetaDataLoader class/object for the current scope.
//==========================================================================
/*static*/ EEClass *
MethodTableBuilder::CreateClass( Module *pModule,
mdTypeDef cl,
BOOL fHasLayout,
BOOL fDelegate,
BOOL fIsEnum,
const MethodTableBuilder::bmtGenericsInfo *bmtGenericsInfo,
LoaderAllocator * pAllocator,
AllocMemTracker *pamTracker)
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(!(fHasLayout && fDelegate));
PRECONDITION(!(fHasLayout && fIsEnum));
PRECONDITION(CheckPointer(bmtGenericsInfo));
}
CONTRACTL_END;
EEClass *pEEClass = NULL;
IMDInternalImport *pInternalImport;
//<TODO>============================================================================
// vtabsize and static size need to be converted from pointer sizes to #'s
// of bytes; this will be very important for 64-bit NT!
// We will need to call on IMetaDataLoad to get these sizes and fill out the
// tables
// From the classref call on metadata to resolve the classref and check scope
// to make sure that this class is in the same scope otherwise we need to open
// a new scope and possibly file.
// if the scopes are different call the code to load a new file and get the new scope
// scopes are the same so we can use the existing scope to get the class info
// This method needs to be fleshed out more. It currently just returns enough
// space for the defined EEClass, and the vtable and statics are not set.
//=============================================================================</TODO>
if (fHasLayout)
{
pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) LayoutEEClass();
}
else if (fDelegate)
{
pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) DelegateEEClass();
}
else
{
pEEClass = new (pAllocator->GetLowFrequencyHeap(), pamTracker) EEClass(sizeof(EEClass));
}
DWORD dwAttrClass = 0;
mdToken tkExtends = mdTokenNil;
// Set up variance info
if (bmtGenericsInfo->pVarianceInfo)
{
// Variance info is an optional field on EEClass, so ensure the optional field descriptor has been
// allocated.
EnsureOptionalFieldsAreAllocated(pEEClass, pamTracker, pAllocator->GetLowFrequencyHeap());
pEEClass->SetVarianceInfo((BYTE*) pamTracker->Track(
pAllocator->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(bmtGenericsInfo->GetNumGenericArgs()))));
memcpy(pEEClass->GetVarianceInfo(), bmtGenericsInfo->pVarianceInfo, bmtGenericsInfo->GetNumGenericArgs());
}
pInternalImport = pModule->GetMDImport();
if (pInternalImport == NULL)
COMPlusThrowHR(COR_E_TYPELOAD);
IfFailThrow(pInternalImport->GetTypeDefProps(
cl,
&dwAttrClass,
&tkExtends));
pEEClass->m_dwAttrClass = dwAttrClass;
// MDVal check: can't be both tdSequentialLayout and tdExplicitLayout
if((dwAttrClass & tdLayoutMask) == tdLayoutMask)
COMPlusThrowHR(COR_E_TYPELOAD);
if (IsTdInterface(dwAttrClass))
{
// MDVal check: must have nil tkExtends and must be tdAbstract
if((tkExtends & 0x00FFFFFF)||(!IsTdAbstract(dwAttrClass)))
COMPlusThrowHR(COR_E_TYPELOAD);
}
if (fHasLayout)
pEEClass->SetHasLayout();
#ifdef FEATURE_COMINTEROP
if (IsTdWindowsRuntime(dwAttrClass))
{
Assembly *pAssembly = pModule->GetAssembly();
// On the desktop CLR, we do not allow non-FX assemblies to use/define WindowsRuntimeImport attribute.
//
// On CoreCLR, however, we do allow non-FX assemblies to have this attribute. This enables scenarios where we can
// activate 3rd-party WinRT components outside AppContainer - 1st party WinRT components are already allowed
// to be activated outside AppContainer (on both Desktop and CoreCLR).
pEEClass->SetProjectedFromWinRT();
}
if (pEEClass->IsProjectedFromWinRT())
{
if (IsTdInterface(dwAttrClass))
{
//
// Check for GuidAttribute
//
BOOL bHasGuid = FALSE;
GUID guid;
HRESULT hr = pModule->GetMDImport()->GetItemGuid(cl, &guid);
IfFailThrow(hr);
if (IsEqualGUID(guid, GUID_NULL))
{
// A WinRT interface should have a GUID
pModule->GetAssembly()->ThrowTypeLoadException(pModule->GetMDImport(), cl, IDS_EE_WINRT_INTERFACE_WITHOUT_GUID);
}
}
}
WinMDAdapter::RedirectedTypeIndex redirectedTypeIndex;
redirectedTypeIndex = WinRTTypeNameConverter::GetRedirectedTypeIndexByName(pModule, cl);
if (redirectedTypeIndex != WinMDAdapter::RedirectedTypeIndex_Invalid)
{
EnsureOptionalFieldsAreAllocated(pEEClass, pamTracker, pAllocator->GetLowFrequencyHeap());
pEEClass->SetWinRTRedirectedTypeIndex(redirectedTypeIndex);
}
#endif // FEATURE_COMINTEROP
#ifdef _DEBUG
pModule->GetClassLoader()->m_dwDebugClasses++;
#endif
return pEEClass;
}
//*******************************************************************************
//
// Create a hash of all methods in this class. The hash is from method name to MethodDesc.
//
MethodTableBuilder::MethodNameHash *
MethodTableBuilder::CreateMethodChainHash(
MethodTable *pMT)
{
STANDARD_VM_CONTRACT;
MethodNameHash *pHash = new (GetStackingAllocator()) MethodNameHash();
pHash->Init(pMT->GetNumVirtuals(), GetStackingAllocator());
unsigned numVirtuals = GetParentMethodTable()->GetNumVirtuals();
for (unsigned i = 0; i < numVirtuals; ++i)
{
bmtMethodSlot &slot = (*bmtParent->pSlotTable)[i];
bmtRTMethod * pMethod = slot.Decl().AsRTMethod();
const MethodSignature &sig = pMethod->GetMethodSignature();
pHash->Insert(sig.GetName(), pMethod);
}
// Success
return pHash;
}
//*******************************************************************************
//
// Find a method in this class hierarchy - used ONLY by the loader during layout. Do not use at runtime.
//
// Returns the matching parent method, or NULL if no matching method in the hierarchy is found.
//
// pMethodConstraintsMatch receives the result of comparing the method constraints.
MethodTableBuilder::bmtRTMethod *
MethodTableBuilder::LoaderFindMethodInParentClass(
const MethodSignature & methodSig,
BOOL * pMethodConstraintsMatch)
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(this));
PRECONDITION(CheckPointer(bmtParent));
PRECONDITION(CheckPointer(methodSig.GetModule()));
PRECONDITION(CheckPointer(methodSig.GetSignature()));
PRECONDITION(HasParent());
PRECONDITION(methodSig.GetSignatureLength() != 0);
}
CONTRACTL_END;
//#if 0
MethodNameHash::HashEntry * pEntry;
// Have we created a hash of all the methods in the class chain?
if (bmtParent->pParentMethodHash == NULL)
{
// There may be such a method, so we will now create a hash table to reduce the pain for
// further lookups
// <TODO> Are we really sure that this is worth doing? </TODO>
bmtParent->pParentMethodHash = CreateMethodChainHash(GetParentMethodTable());
}
// We have a hash table, so use it
pEntry = bmtParent->pParentMethodHash->Lookup(methodSig.GetName());
// Traverse the chain of all methods with this name
while (pEntry != NULL)
{
bmtRTMethod * pEntryMethod = pEntry->m_data;
const MethodSignature & entrySig = pEntryMethod->GetMethodSignature();
// Note instantiation info
{
if (methodSig.Equivalent(entrySig))
{
if (pMethodConstraintsMatch != NULL)
{
// Check the constraints are consistent,
// and return the result to the caller.
// We do this here to avoid recalculating pSubst.
*pMethodConstraintsMatch = MetaSig::CompareMethodConstraints(
&methodSig.GetSubstitution(), methodSig.GetModule(), methodSig.GetToken(),
&entrySig.GetSubstitution(), entrySig.GetModule(), entrySig.GetToken());
}
return pEntryMethod;
}
}
// Advance to next item in the hash chain which has the same name
pEntry = bmtParent->pParentMethodHash->FindNext(pEntry);
}
//#endif
//@TODO: Move to this code, as the use of a HashTable is broken; overriding semantics
//@TODO: require matching against the most-derived slot of a given name and signature,
//@TODO: (which deals specifically with newslot methods with identical name and sig), but
//@TODO: HashTables are by definition unordered and so we've only been getting by with the
//@TODO: implementation being compatible with the order in which methods were added to
//@TODO: the HashTable in CreateMethodChainHash.
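// Illustrative sketch (C#-style, not from this codebase) of the newslot case the TODO refers to:
//   class A { public virtual void M() { } }
//   class B : A { public new virtual void M() { } }   // newslot with the same name and sig as A.M
//   class C : B { public override void M() { } }      // must bind to the most-derived slot, B.M
// Even though the hash buckets are unordered, lookups currently rely on insertion order to
// return B.M rather than A.M.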
#if 0
bmtParentInfo::Iterator it(bmtParent->IterateSlots());
it.MoveTo(static_cast<size_t>(GetParentMethodTable()->GetNumVirtuals()));
while (it.Prev())
{
bmtMethodHandle decl(it->Decl());
const MethodSignature &declSig(decl.GetMethodSignature());
if (declSig == methodSig)
{
if (pMethodConstraintsMatch != NULL)
{
// Check the constraints are consistent,
// and return the result to the caller.
// We do this here to avoid recalculating pSubst.
*pMethodConstraintsMatch = MetaSig::CompareMethodConstraints(
&methodSig.GetSubstitution(), methodSig.GetModule(), methodSig.GetToken(),
&declSig.GetSubstitution(), declSig.GetModule(), declSig.GetToken());
}
return decl.AsRTMethod();
}
}
#endif // 0
return NULL;
}
//*******************************************************************************
//
// Given an interface map to fill out, expand pNewInterface (and its sub-interfaces) into it, increasing
// pdwInterfaceListSize as appropriate, and avoiding duplicates.
//
void
MethodTableBuilder::ExpandApproxInterface(
bmtInterfaceInfo * bmtInterface, // out parameter, various parts cumulatively written to.
const Substitution * pNewInterfaceSubstChain,
MethodTable * pNewInterface,
InterfaceDeclarationScope declScope
COMMA_INDEBUG(MethodTable * dbg_pClassMT))
{
STANDARD_VM_CONTRACT;
//#ExpandingInterfaces
// We expand the tree of inherited interfaces into a set by adding the
// current node BEFORE expanding the parents of the current node.
// ****** This must be consistent with code:ExpandExactInterface *******
// ****** This must be consistent with code:ClassCompat::MethodTableBuilder::BuildInteropVTable_ExpandInterface *******
// The interface list contains the fully expanded set of interfaces from the parent then
// we start adding all the interfaces we declare. We need to know which interfaces
// we declare but do not need duplicates of the ones we declare. This means we can
// duplicate our parent entries.
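// Illustrative example (C#-style, types made up for this comment):
//   interface I1 { }  interface I2 { }  interface I3 : I1, I2 { }
//   class Base : I1 { }  class Derived : Base, I3 { }
// Derived's approximate map starts with Base's fully expanded list (I1), then I3 is added as
// declared-on-type, then I3's parents are expanded: I1 is found to be a duplicate and skipped,
// and I2 is appended. The resulting order is I1, I3, I2.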
// Is it already present in the list?
for (DWORD i = 0; i < bmtInterface->dwInterfaceMapSize; i++)
{
bmtInterfaceEntry * pItfEntry = &bmtInterface->pInterfaceMap[i];
bmtRTType * pItfType = pItfEntry->GetInterfaceType();
// Type Equivalence is not respected for this comparison as you can have multiple type-equivalent interfaces on a class
TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL);
if (MetaSig::CompareTypeDefsUnderSubstitutions(pItfType->GetMethodTable(),
pNewInterface,
&pItfType->GetSubstitution(),
pNewInterfaceSubstChain,
&newVisited))
{
if (declScope.fIsInterfaceDeclaredOnType)
{
pItfEntry->IsDeclaredOnType() = true;
}
#ifdef _DEBUG
//#InjectInterfaceDuplicates_ApproxInterfaces
// We can inject duplicate interfaces in check builds.
// Has to be in sync with code:#InjectInterfaceDuplicates_Main
if (((dbg_pClassMT == NULL) && bmtInterface->dbg_fShouldInjectInterfaceDuplicates) ||
((dbg_pClassMT != NULL) && dbg_pClassMT->Debug_HasInjectedInterfaceDuplicates()))
{
// The injected duplicate interface should have the same status 'ImplementedByParent' as
// the original interface (can be false if the interface is implemented indirectly twice)
declScope.fIsInterfaceDeclaredOnParent = pItfEntry->IsImplementedByParent();
// Just pretend we didn't find this match, but mark all duplicates as 'DeclaredOnType' if
// needed
continue;
}
#endif //_DEBUG
return; // found it, don't add it again
}
}
bmtRTType * pNewItfType =
new (GetStackingAllocator()) bmtRTType(*pNewInterfaceSubstChain, pNewInterface);
if (bmtInterface->dwInterfaceMapSize >= bmtInterface->dwInterfaceMapAllocated)
{
//
// Grow the array of interfaces
//
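// The growth policy below (2 * current + 5) yields capacities of 5, 15, 35, 75, ... starting from an empty map.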
S_UINT32 dwNewAllocated = S_UINT32(2) * S_UINT32(bmtInterface->dwInterfaceMapAllocated) + S_UINT32(5);
if (dwNewAllocated.IsOverflow())
{
BuildMethodTableThrowException(COR_E_OVERFLOW);
}
S_SIZE_T safeSize = S_SIZE_T(sizeof(bmtInterfaceEntry)) *
S_SIZE_T(dwNewAllocated.Value());
if (safeSize.IsOverflow())
{
BuildMethodTableThrowException(COR_E_OVERFLOW);
}
bmtInterfaceEntry * pNewMap = (bmtInterfaceEntry *)new (GetStackingAllocator()) BYTE[safeSize.Value()];
memcpy(pNewMap, bmtInterface->pInterfaceMap, sizeof(bmtInterfaceEntry) * bmtInterface->dwInterfaceMapAllocated);
bmtInterface->pInterfaceMap = pNewMap;
bmtInterface->dwInterfaceMapAllocated = dwNewAllocated.Value();
}
// The interface map memory was just allocated as an array of bytes, so we use
// placement new to initialize the new map entry in place. Nothing needs to be done
// with the result, so just discard it.
CONSISTENCY_CHECK(bmtInterface->dwInterfaceMapSize < bmtInterface->dwInterfaceMapAllocated);
new ((void *)&bmtInterface->pInterfaceMap[bmtInterface->dwInterfaceMapSize])
bmtInterfaceEntry(pNewItfType, declScope);
bmtInterface->dwInterfaceMapSize++;
// Make sure to pass in the substitution from the new itf type created above as
// these methods assume that substitutions are allocated in the stacking heap,
// not the stack.
InterfaceDeclarationScope declaredItfScope(declScope.fIsInterfaceDeclaredOnParent, false);
ExpandApproxDeclaredInterfaces(
bmtInterface,
bmtTypeHandle(pNewItfType),
declaredItfScope
COMMA_INDEBUG(dbg_pClassMT));
} // MethodTableBuilder::ExpandApproxInterface
//*******************************************************************************
// Arguments:
// dbg_pClassMT - Class on which the interfaces are declared (either explicitly or implicitly).
// It will never be an interface. It may be NULL (if it is the type being built).
void
MethodTableBuilder::ExpandApproxDeclaredInterfaces(
bmtInterfaceInfo * bmtInterface, // out parameter, various parts cumulatively written to.
bmtTypeHandle thType,
InterfaceDeclarationScope declScope
COMMA_INDEBUG(MethodTable * dbg_pClassMT))
{
STANDARD_VM_CONTRACT;
_ASSERTE((dbg_pClassMT == NULL) || !dbg_pClassMT->IsInterface());
HRESULT hr;
// Iterate the list of interfaces declared by thType and add them to the map.
InterfaceImplEnum ie(thType.GetModule(), thType.GetTypeDefToken(), &thType.GetSubstitution());
while ((hr = ie.Next()) == S_OK)
{
MethodTable *pGenericIntf = ClassLoader::LoadApproxTypeThrowing(
thType.GetModule(), ie.CurrentToken(), NULL, NULL).GetMethodTable();
CONSISTENCY_CHECK(pGenericIntf->IsInterface());
ExpandApproxInterface(bmtInterface,
ie.CurrentSubst(),
pGenericIntf,
declScope
COMMA_INDEBUG(dbg_pClassMT));
}
if (FAILED(hr))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
} // MethodTableBuilder::ExpandApproxDeclaredInterfaces
//*******************************************************************************
void
MethodTableBuilder::ExpandApproxInheritedInterfaces(
bmtInterfaceInfo * bmtInterface,
bmtRTType * pParentType)
{
STANDARD_VM_CONTRACT;
INTERIOR_STACK_PROBE(GetThread());
// Expand interfaces in superclasses first. Interfaces inherited from parents
// must have identical indexes as in the parent.
bmtRTType * pParentOfParent = pParentType->GetParentType();
//#InterfaceMap_SupersetOfParent
// We have to load parent's interface map the same way the parent did it (as open type).
// Further code depends on this:
// code:#InterfaceMap_UseParentInterfaceImplementations
// We verify that this holds:
// code:#ApproxInterfaceMap_SupersetOfParent
// code:#ExactInterfaceMap_SupersetOfParent
//
//#InterfaceMap_CanonicalSupersetOfParent
// Note that the canonical instantiation of the parent can have different interface instantiations
// in its interface map than the derived type:
// class MyClass<T> : MyBase<string, T>, I<T>
// class MyBase<U, V> : I<U>
// Type MyClass<_Canon> has MyBase<_Canon,_Canon> as parent. The interface maps are:
// MyBase<_Canon,_Canon> ... I<_Canon>
// MyClass<_Canon> ... I<string> (#1)
// I<_Canon> (#2)
// The I's instantiation I<string> (#1) in MyClass and I<_Canon> from MyBase are not the same
// instantiations.
// Backup parent substitution
Substitution parentSubstitution = pParentType->GetSubstitution();
// Make parent an open type
pParentType->SetSubstitution(Substitution());
if (pParentOfParent != NULL)
{
ExpandApproxInheritedInterfaces(bmtInterface, pParentOfParent);
}
InterfaceDeclarationScope declScope(true, false);
ExpandApproxDeclaredInterfaces(
bmtInterface,
bmtTypeHandle(pParentType),
declScope
COMMA_INDEBUG(pParentType->GetMethodTable()));
// Make sure we loaded the same number of interfaces as the parent type itself
CONSISTENCY_CHECK(pParentType->GetMethodTable()->GetNumInterfaces() == bmtInterface->dwInterfaceMapSize);
// Restore parent's substitution
pParentType->SetSubstitution(parentSubstitution);
END_INTERIOR_STACK_PROBE;
} // MethodTableBuilder::ExpandApproxInheritedInterfaces
//*******************************************************************************
// Fill out a fully expanded interface map, such that if we are declared to
// implement I3, and I3 extends I1,I2, then I1,I2 are added to our list if
// they are not already present.
void
MethodTableBuilder::LoadApproxInterfaceMap()
{
STANDARD_VM_CONTRACT;
bmtInterface->dwInterfaceMapSize = 0;
#ifdef _DEBUG
//#InjectInterfaceDuplicates_Main
// We will inject duplicate interfaces in check builds if env. var.
// COMPLUS_INTERNAL_TypeLoader_InjectInterfaceDuplicates is set to TRUE for all types (incl. non-generic
// types).
// This should allow us better test coverage of duplicates in interface map.
//
// The duplicates are legal for some types:
// A<T> : I<T>
// B<U,V> : A<U>, I<V>
// C : B<int,int>
// where the interface maps are:
// A<T> ... 1 item: I<T>
// A<int> ... 1 item: I<int>
// B<U,V> ... 2 items: I<U>, I<V>
// B<int,int> ... 2 items: I<int>, I<int>
// B<_Canon,_Canon> ... 2 items: I<_Canon>, I<_Canon>
// B<string,string> ... 2 items: I<string>, I<string>
// C ... 2 items: I<int>, I<int>
// Note: C had only 1 item (I<int>) in CLR 2.0 RTM/SP1/SP2 and early in CLR 4.0.
//
// We will create a duplicate from every re-implemented interface (incl. non-generic):
// code:#InjectInterfaceDuplicates_ApproxInterfaces
// code:#InjectInterfaceDuplicates_LoadExactInterfaceMap
// code:#InjectInterfaceDuplicates_ExactInterfaces
//
// Note that we don't have to do anything for COM, because COM has its own interface map
// (code:InteropMethodTableData), which is independent of the type's interface map and is created only from
// non-generic interfaces (see code:ClassCompat::MethodTableBuilder::BuildInteropVTable_InterfaceList).
// We need to keep track of which interface duplicates were injected. Right now it's either all interfaces
// (declared on the type being built, not inherited) or none. In the future we could inject duplicates
// just for some of them.
bmtInterface->dbg_fShouldInjectInterfaceDuplicates =
(CLRConfig::GetConfigValue(CLRConfig::INTERNAL_TypeLoader_InjectInterfaceDuplicates) != 0);
if (bmtGenerics->Debug_GetTypicalMethodTable() != NULL)
{ // It's safer to require that all instantiations have the same injected interface duplicates.
// In future we could inject different duplicates for various non-shared instantiations.
// Use the same injection status as typical instantiation
bmtInterface->dbg_fShouldInjectInterfaceDuplicates =
bmtGenerics->Debug_GetTypicalMethodTable()->Debug_HasInjectedInterfaceDuplicates();
if (GetModule() == g_pObjectClass->GetModule())
{ // mscorlib has some weird hardcoded information about interfaces (e.g.
// code:CEEPreloader::ApplyTypeDependencyForSZArrayHelper), so we don't inject duplicates into
// mscorlib types
bmtInterface->dbg_fShouldInjectInterfaceDuplicates = FALSE;
}
}
#endif //_DEBUG
// First inherit all the parent's interfaces. This is important, because our interface map must
// list the interfaces in identical order to our parent.
//
// <NICE> we should document the reasons why. One reason is that DispatchMapTypeIDs can be indexes
// into the list </NICE>
if (HasParent())
{
ExpandApproxInheritedInterfaces(bmtInterface, GetParentType());
#ifdef _DEBUG
//#ApproxInterfaceMap_SupersetOfParent
// Check that parent's interface map is the same as what we just computed
// See code:#InterfaceMap_SupersetOfParent
{
MethodTable * pParentMT = GetParentMethodTable();
_ASSERTE(pParentMT->GetNumInterfaces() == bmtInterface->dwInterfaceMapSize);
MethodTable::InterfaceMapIterator parentInterfacesIterator = pParentMT->IterateInterfaceMap();
UINT32 nInterfaceIndex = 0;
while (parentInterfacesIterator.Next())
{
// Compare TypeDefs of the parent's interface and this interface (full MT comparison is in
// code:#ExactInterfaceMap_SupersetOfParent)
OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOAD_APPROXPARENTS);
_ASSERTE(parentInterfacesIterator.GetInterfaceInfo()->GetApproxMethodTable(pParentMT->GetLoaderModule())->HasSameTypeDefAs(
bmtInterface->pInterfaceMap[nInterfaceIndex].GetInterfaceType()->GetMethodTable()));
nInterfaceIndex++;
}
_ASSERTE(nInterfaceIndex == bmtInterface->dwInterfaceMapSize);
}
#endif //_DEBUG
}
// Now add in any freshly declared interfaces, possibly augmenting the flags
InterfaceDeclarationScope declScope(false, true);
ExpandApproxDeclaredInterfaces(
bmtInterface,
bmtInternal->pType,
declScope
COMMA_INDEBUG(NULL));
} // MethodTableBuilder::LoadApproxInterfaceMap
//*******************************************************************************
// Fills the array of TypeIDs with all duplicate occurrences of pDeclIntfMT in the interface map.
//
// Arguments:
// rg/c DispatchMapTypeIDs - Array of TypeIDs and its count of elements.
// pcIfaceDuplicates - Number of duplicate occurrences of the interface in the interface map (ideally <=
// count of elements in TypeIDs).
//
// Note: If the passed rgDispatchMapTypeIDs array is smaller than the number of duplicates, fills it
// with the duplicates that fit and returns the number of all existing duplicates (not just those filled
// in the array) in pcIfaceDuplicates.
//
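// Illustrative example (reusing the duplicate-injection sample documented in LoadApproxInterfaceMap):
// for C : B<int,int> with interface map { I<int>, I<int> }, a call for I<int> reports
// *pcIfaceDuplicates == 2 and, if the array is large enough, fills rgDispatchMapTypeIDs with the
// TypeIDs for interface map indices 0 and 1.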
void
MethodTableBuilder::ComputeDispatchMapTypeIDs(
MethodTable * pDeclInftMT,
const Substitution * pDeclIntfSubst,
DispatchMapTypeID * rgDispatchMapTypeIDs,
UINT32 cDispatchMapTypeIDs,
UINT32 * pcIfaceDuplicates)
{
STANDARD_VM_CONTRACT;
_ASSERTE(pDeclInftMT->IsInterface());
// Count of interface duplicates (also used as index into TypeIDs array)
*pcIfaceDuplicates = 0;
for (DWORD idx = 0; idx < bmtInterface->dwInterfaceMapSize; idx++)
{
bmtInterfaceEntry * pItfEntry = &bmtInterface->pInterfaceMap[idx];
bmtRTType * pItfType = pItfEntry->GetInterfaceType();
// Type Equivalence is forbidden in interface type ids.
TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL);
if (MetaSig::CompareTypeDefsUnderSubstitutions(pItfType->GetMethodTable(),
pDeclInftMT,
&pItfType->GetSubstitution(),
pDeclIntfSubst,
&newVisited))
{ // We found another occurrence of this interface
// Can we fit it into the TypeID array?
if (*pcIfaceDuplicates < cDispatchMapTypeIDs)
{
rgDispatchMapTypeIDs[*pcIfaceDuplicates] = DispatchMapTypeID::InterfaceClassID(idx);
}
// Increase number of duplicate interfaces
(*pcIfaceDuplicates)++;
}
}
} // MethodTableBuilder::ComputeDispatchMapTypeIDs
//*******************************************************************************
/*static*/
VOID DECLSPEC_NORETURN
MethodTableBuilder::BuildMethodTableThrowException(
HRESULT hr,
const bmtErrorInfo & bmtError)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
INJECT_FAULT(COMPlusThrowOM(););
}
CONTRACTL_END
LPCUTF8 pszClassName, pszNameSpace;
if (FAILED(bmtError.pModule->GetMDImport()->GetNameOfTypeDef(bmtError.cl, &pszClassName, &pszNameSpace)))
{
pszClassName = pszNameSpace = "Invalid TypeDef record";
}
if (IsNilToken(bmtError.dMethodDefInError) && (bmtError.szMethodNameForError == NULL))
{
if (hr == E_OUTOFMEMORY)
{
COMPlusThrowOM();
}
else
bmtError.pModule->GetAssembly()->ThrowTypeLoadException(
pszNameSpace, pszClassName, bmtError.resIDWhy);
}
else
{
LPCUTF8 szMethodName;
if (bmtError.szMethodNameForError == NULL)
{
if (FAILED((bmtError.pModule->GetMDImport())->GetNameOfMethodDef(bmtError.dMethodDefInError, &szMethodName)))
{
szMethodName = "Invalid MethodDef record";
}
}
else
{
szMethodName = bmtError.szMethodNameForError;
}
bmtError.pModule->GetAssembly()->ThrowTypeLoadException(
pszNameSpace, pszClassName, szMethodName, bmtError.resIDWhy);
}
} // MethodTableBuilder::BuildMethodTableThrowException
//*******************************************************************************
void MethodTableBuilder::SetBMTData(
LoaderAllocator *bmtAllocator,
bmtErrorInfo *bmtError,
bmtProperties *bmtProp,
bmtVtable *bmtVT,
bmtParentInfo *bmtParent,
bmtInterfaceInfo *bmtInterface,
bmtMetaDataInfo *bmtMetaData,
bmtMethodInfo *bmtMethod,
bmtMethAndFieldDescs *bmtMFDescs,
bmtFieldPlacement *bmtFP,
bmtInternalInfo *bmtInternal,
bmtGCSeriesInfo *bmtGCSeries,
bmtMethodImplInfo *bmtMethodImpl,
const bmtGenericsInfo *bmtGenerics,
bmtEnumFieldInfo *bmtEnumFields,
bmtContextStaticInfo *bmtCSInfo)
{
LIMITED_METHOD_CONTRACT;
this->bmtAllocator = bmtAllocator;
this->bmtError = bmtError;
this->bmtProp = bmtProp;
this->bmtVT = bmtVT;
this->bmtParent = bmtParent;
this->bmtInterface = bmtInterface;
this->bmtMetaData = bmtMetaData;
this->bmtMethod = bmtMethod;
this->bmtMFDescs = bmtMFDescs;
this->bmtFP = bmtFP;
this->bmtInternal = bmtInternal;
this->bmtGCSeries = bmtGCSeries;
this->bmtMethodImpl = bmtMethodImpl;
this->bmtGenerics = bmtGenerics;
this->bmtEnumFields = bmtEnumFields;
this->bmtCSInfo = bmtCSInfo;
}
//*******************************************************************************
// Used by MethodTableBuilder
MethodTableBuilder::bmtRTType *
MethodTableBuilder::CreateTypeChain(
MethodTable * pMT,
const Substitution & subst)
{
CONTRACTL
{
STANDARD_VM_CHECK;
INSTANCE_CHECK;
PRECONDITION(CheckPointer(GetStackingAllocator()));
PRECONDITION(CheckPointer(pMT));
} CONTRACTL_END;
pMT = pMT->GetCanonicalMethodTable();
bmtRTType * pType = new (GetStackingAllocator())
bmtRTType(subst, pMT);
MethodTable * pMTParent = pMT->GetParentMethodTable();
if (pMTParent != NULL)
{
pType->SetParentType(
CreateTypeChain(
pMTParent,
pMT->GetSubstitutionForParent(&pType->GetSubstitution())));
}
return pType;
}
//*******************************************************************************
/* static */
MethodTableBuilder::bmtRTType *
MethodTableBuilder::bmtRTType::FindType(
bmtRTType * pType,
MethodTable * pTargetMT)
{
CONTRACTL {
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(pType));
PRECONDITION(CheckPointer(pTargetMT));
} CONTRACTL_END;
pTargetMT = pTargetMT->GetCanonicalMethodTable();
while (pType != NULL &&
pType->GetMethodTable()->GetCanonicalMethodTable() != pTargetMT)
{
pType = pType->GetParentType();
}
return pType;
}
//*******************************************************************************
mdTypeDef
MethodTableBuilder::bmtRTType::GetEnclosingTypeToken() const
{
STANDARD_VM_CONTRACT;
mdTypeDef tok = mdTypeDefNil;
if (IsNested())
{ // This is guaranteed to succeed because the EEClass would not have been
// set as nested unless a valid token was stored in metadata.
if (FAILED(GetModule()->GetMDImport()->GetNestedClassProps(
GetTypeDefToken(), &tok)))
{
return mdTypeDefNil;
}
}
return tok;
}
//*******************************************************************************
/*static*/ bool
MethodTableBuilder::MethodSignature::NamesEqual(
const MethodSignature & sig1,
const MethodSignature & sig2)
{
STANDARD_VM_CONTRACT;
if (sig1.GetNameHash() != sig2.GetNameHash())
{
return false;
}
if (strcmp(sig1.GetName(), sig2.GetName()) != 0)
{
return false;
}
return true;
}
//*******************************************************************************
/*static*/ bool
MethodTableBuilder::MethodSignature::SignaturesEquivalent(
const MethodSignature & sig1,
const MethodSignature & sig2)
{
STANDARD_VM_CONTRACT;
return !!MetaSig::CompareMethodSigs(
sig1.GetSignature(), static_cast<DWORD>(sig1.GetSignatureLength()), sig1.GetModule(), &sig1.GetSubstitution(),
sig2.GetSignature(), static_cast<DWORD>(sig2.GetSignatureLength()), sig2.GetModule(), &sig2.GetSubstitution());
}
//*******************************************************************************
/*static*/ bool
MethodTableBuilder::MethodSignature::SignaturesExactlyEqual(
const MethodSignature & sig1,
const MethodSignature & sig2)
{
STANDARD_VM_CONTRACT;
TokenPairList newVisited = TokenPairList::AdjustForTypeEquivalenceForbiddenScope(NULL);
return !!MetaSig::CompareMethodSigs(
sig1.GetSignature(), static_cast<DWORD>(sig1.GetSignatureLength()), sig1.GetModule(), &sig1.GetSubstitution(),
sig2.GetSignature(), static_cast<DWORD>(sig2.GetSignatureLength()), sig2.GetModule(), &sig2.GetSubstitution(),
&newVisited);
}
//*******************************************************************************
bool
MethodTableBuilder::MethodSignature::Equivalent(
const MethodSignature &rhs) const
{
STANDARD_VM_CONTRACT;
return NamesEqual(*this, rhs) && SignaturesEquivalent(*this, rhs);
}
//*******************************************************************************
bool
MethodTableBuilder::MethodSignature::ExactlyEqual(
const MethodSignature &rhs) const
{
STANDARD_VM_CONTRACT;
return NamesEqual(*this, rhs) && SignaturesExactlyEqual(*this, rhs);
}
//*******************************************************************************
void
MethodTableBuilder::MethodSignature::GetMethodAttributes() const
{
STANDARD_VM_CONTRACT;
IMDInternalImport * pIMD = GetModule()->GetMDImport();
if (TypeFromToken(GetToken()) == mdtMethodDef)
{
DWORD cSig;
if (FAILED(pIMD->GetNameAndSigOfMethodDef(GetToken(), &m_pSig, &cSig, &m_szName)))
{ // We have empty name or signature on error, do nothing
}
m_cSig = static_cast<size_t>(cSig);
}
else
{
CONSISTENCY_CHECK(TypeFromToken(m_tok) == mdtMemberRef);
DWORD cSig;
if (FAILED(pIMD->GetNameAndSigOfMemberRef(GetToken(), &m_pSig, &cSig, &m_szName)))
{ // We have empty name or signature on error, do nothing
}
m_cSig = static_cast<size_t>(cSig);
}
}
//*******************************************************************************
UINT32
MethodTableBuilder::MethodSignature::GetNameHash() const
{
STANDARD_VM_CONTRACT;
CheckGetMethodAttributes();
if (m_nameHash == INVALID_NAME_HASH)
{
ULONG nameHash = HashStringA(GetName());
if (nameHash == INVALID_NAME_HASH)
{
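// INVALID_NAME_HASH doubles as the "not yet computed" sentinel in m_nameHash, so a real hash
// that happens to equal it is remapped to a different value.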
nameHash /= 2;
}
m_nameHash = nameHash;
}
return m_nameHash;
}
//*******************************************************************************
MethodTableBuilder::bmtMDType::bmtMDType(
bmtRTType * pParentType,
Module * pModule,
mdTypeDef tok,
const SigTypeContext & sigContext)
: m_pParentType(pParentType),
m_pModule(pModule),
m_tok(tok),
m_enclTok(mdTypeDefNil),
m_sigContext(sigContext),
m_subst(),
m_dwAttrs(0),
m_pMT(NULL)
{
STANDARD_VM_CONTRACT;
IfFailThrow(m_pModule->GetMDImport()->GetTypeDefProps(m_tok, &m_dwAttrs, NULL));
HRESULT hr = m_pModule->GetMDImport()->GetNestedClassProps(m_tok, &m_enclTok);
if (FAILED(hr))
{
if (hr != CLDB_E_RECORD_NOTFOUND)
{
ThrowHR(hr);
}
// Just in case GetNestedClassProps sets the out param to some other value
m_enclTok = mdTypeDefNil;
}
}
//*******************************************************************************
MethodTableBuilder::bmtRTMethod::bmtRTMethod(
bmtRTType * pOwningType,
MethodDesc * pMD)
: m_pOwningType(pOwningType),
m_pMD(pMD),
m_methodSig(pMD->GetModule(),
pMD->GetMemberDef(),
&pOwningType->GetSubstitution())
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
}
//*******************************************************************************
MethodTableBuilder::bmtMDMethod::bmtMDMethod(
bmtMDType * pOwningType,
mdMethodDef tok,
DWORD dwDeclAttrs,
DWORD dwImplAttrs,
DWORD dwRVA,
METHOD_TYPE type,
METHOD_IMPL_TYPE implType)
: m_pOwningType(pOwningType),
m_dwDeclAttrs(dwDeclAttrs),
m_dwImplAttrs(dwImplAttrs),
m_dwRVA(dwRVA),
m_type(type),
m_implType(implType),
m_methodSig(pOwningType->GetModule(),
tok,
&pOwningType->GetSubstitution()),
m_pMD(NULL),
m_pUnboxedMD(NULL),
m_slotIndex(INVALID_SLOT_INDEX),
m_unboxedSlotIndex(INVALID_SLOT_INDEX)
{
CONTRACTL
{
THROWS;
GC_TRIGGERS;
MODE_ANY;
}
CONTRACTL_END;
}
//*******************************************************************************
void
MethodTableBuilder::ImportParentMethods()
{
STANDARD_VM_CONTRACT;
if (!HasParent())
{ // If there's no parent, there are no methods to import
return;
}
SLOT_INDEX numMethods = static_cast<SLOT_INDEX>
(GetParentMethodTable()->GetNumMethods());
bmtParent->pSlotTable = new (GetStackingAllocator())
bmtMethodSlotTable(numMethods, GetStackingAllocator());
MethodTable::MethodIterator it(GetParentMethodTable());
for (;it.IsValid(); it.Next())
{
MethodDesc * pDeclDesc = NULL;
MethodTable * pDeclMT = NULL;
MethodDesc * pImplDesc = NULL;
MethodTable * pImplMT = NULL;
if (it.IsVirtual())
{
pDeclDesc = it.GetDeclMethodDesc();
pDeclMT = pDeclDesc->GetMethodTable();
pImplDesc = it.GetMethodDesc();
pImplMT = pImplDesc->GetMethodTable();
}
else
{
pDeclDesc = pImplDesc = it.GetMethodDesc();
pDeclMT = pImplMT = it.GetMethodDesc()->GetMethodTable();
}
CONSISTENCY_CHECK(CheckPointer(pDeclDesc));
CONSISTENCY_CHECK(CheckPointer(pImplDesc));
// Create and assign to each slot
bmtMethodSlot newSlot;
newSlot.Decl() = new (GetStackingAllocator())
bmtRTMethod(bmtRTType::FindType(GetParentType(), pDeclMT), pDeclDesc);
if (pDeclDesc == pImplDesc)
{
newSlot.Impl() = newSlot.Decl();
}
else
{
newSlot.Impl() = new (GetStackingAllocator())
bmtRTMethod(bmtRTType::FindType(GetParentType(), pImplMT), pImplDesc);
}
if (!bmtParent->pSlotTable->AddMethodSlot(newSlot))
BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS);
}
}
//*******************************************************************************
void
MethodTableBuilder::CopyParentVtable()
{
STANDARD_VM_CONTRACT;
if (!HasParent())
{
return;
}
for (bmtParentInfo::Iterator it = bmtParent->IterateSlots();
!it.AtEnd() && it.CurrentIndex() < GetParentMethodTable()->GetNumVirtuals();
++it)
{
if (!bmtVT->pSlotTable->AddMethodSlot(*it))
BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS);
++bmtVT->cVirtualSlots;
++bmtVT->cTotalSlots;
}
}
//*******************************************************************************
// Determine if this is the special SIMD type System.Numerics.Vector<T>, whose
// size is determined dynamically based on the hardware and the presence of JIT
// support.
// If so:
// - Update the NumInstanceFieldBytes on the bmtFieldPlacement.
// - Update the m_cbNativeSize and m_cbManagedSize if HasLayout() is true.
// Return a BOOL result to indicate whether the size has been updated.
//
// Will throw IDS_EE_SIMD_NGEN_DISALLOWED if the type is System.Numerics.Vector`1
// and this is an ngen compilation process.
//
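// Illustrative outcome (numbers are an assumption; the actual value comes from the JIT and hardware):
// on an AVX-capable machine with SIMD support enabled, getMaxIntrinsicSIMDVectorLength may report 32,
// so Vector<float> grows from the 16 bytes declared in metadata to 32 bytes.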
BOOL MethodTableBuilder::CheckIfSIMDAndUpdateSize()
{
STANDARD_VM_CONTRACT;
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
if (!(GetAssembly()->IsSIMDVectorAssembly() || bmtProp->fIsIntrinsicType))
return false;
if (bmtFP->NumInstanceFieldBytes != 16)
return false;
LPCUTF8 className;
LPCUTF8 nameSpace;
if (FAILED(GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace)))
return false;
if (strcmp(className, "Vector`1") != 0 || strcmp(nameSpace, "System.Numerics") != 0)
return false;
if (IsCompilationProcess())
{
COMPlusThrow(kTypeLoadException, IDS_EE_SIMD_NGEN_DISALLOWED);
}
#ifndef CROSSGEN_COMPILE
if (!TargetHasAVXSupport())
return false;
EEJitManager *jitMgr = ExecutionManager::GetEEJitManager();
if (jitMgr->LoadJIT())
{
CORJIT_FLAGS cpuCompileFlags = jitMgr->GetCPUCompileFlags();
if (cpuCompileFlags.IsSet(CORJIT_FLAGS::CORJIT_FLAG_FEATURE_SIMD))
{
unsigned intrinsicSIMDVectorLength = jitMgr->m_jit->getMaxIntrinsicSIMDVectorLength(cpuCompileFlags);
if (intrinsicSIMDVectorLength != 0)
{
bmtFP->NumInstanceFieldBytes = intrinsicSIMDVectorLength;
if (HasLayout())
{
GetLayoutInfo()->m_cbNativeSize = intrinsicSIMDVectorLength;
GetLayoutInfo()->m_cbManagedSize = intrinsicSIMDVectorLength;
}
return true;
}
}
}
#endif // !CROSSGEN_COMPILE
#endif // defined(_TARGET_X86_) || defined(_TARGET_AMD64_)
return false;
}
//*******************************************************************************
void
MethodTableBuilder::bmtInterfaceEntry::CreateSlotTable(
StackingAllocator * pStackingAllocator)
{
STANDARD_VM_CONTRACT;
CONSISTENCY_CHECK(m_pImplTable == NULL);
SLOT_INDEX cSlots = (SLOT_INDEX)GetInterfaceType()->GetMethodTable()->GetNumVirtuals();
bmtInterfaceSlotImpl * pST = new (pStackingAllocator) bmtInterfaceSlotImpl[cSlots];
MethodTable::MethodIterator it(GetInterfaceType()->GetMethodTable());
for (; it.IsValid(); it.Next())
{
if (!it.IsVirtual())
{
break;
}
bmtRTMethod * pCurMethod = new (pStackingAllocator)
bmtRTMethod(GetInterfaceType(), it.GetDeclMethodDesc());
CONSISTENCY_CHECK(m_cImplTable == it.GetSlotNumber());
pST[m_cImplTable++] = bmtInterfaceSlotImpl(pCurMethod, INVALID_SLOT_INDEX);
}
m_pImplTable = pST;
}
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:21000) // Suppress PREFast warning about overly large function
#endif // _PREFAST_
//---------------------------------------------------------------------------------------
//
// Builds the method table, allocates MethodDesc, handles overloaded members, attempts to compress
// interface storage. All dependent classes must already be resolved!
//
MethodTable *
MethodTableBuilder::BuildMethodTableThrowing(
LoaderAllocator * pAllocator,
Module * pLoaderModule,
Module * pModule,
mdToken cl,
BuildingInterfaceInfo_t * pBuildingInterfaceList,
const LayoutRawFieldInfo * pLayoutRawFieldInfos,
MethodTable * pParentMethodTable,
const bmtGenericsInfo * bmtGenericsInfo,
SigPointer parentInst,
WORD cBuildingInterfaceList)
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(GetHalfBakedClass()));
PRECONDITION(CheckPointer(bmtGenericsInfo));
}
CONTRACTL_END;
pModule->EnsureLibraryLoaded();
// The following structs, defined as private members of MethodTableBuilder, contain the necessary local
// parameters needed for BuildMethodTable. Look at the struct definitions for a detailed list of all
// parameters available to BuildMethodTableThrowing.
SetBMTData(
pAllocator,
new (GetStackingAllocator()) bmtErrorInfo(),
new (GetStackingAllocator()) bmtProperties(),
new (GetStackingAllocator()) bmtVtable(),
new (GetStackingAllocator()) bmtParentInfo(),
new (GetStackingAllocator()) bmtInterfaceInfo(),
new (GetStackingAllocator()) bmtMetaDataInfo(),
new (GetStackingAllocator()) bmtMethodInfo(),
new (GetStackingAllocator()) bmtMethAndFieldDescs(),
new (GetStackingAllocator()) bmtFieldPlacement(),
new (GetStackingAllocator()) bmtInternalInfo(),
new (GetStackingAllocator()) bmtGCSeriesInfo(),
new (GetStackingAllocator()) bmtMethodImplInfo(),
bmtGenericsInfo,
new (GetStackingAllocator()) bmtEnumFieldInfo(pModule->GetMDImport()),
new (GetStackingAllocator()) bmtContextStaticInfo());
//Initialize structs
bmtError->resIDWhy = IDS_CLASSLOAD_GENERAL; // Set the reason and the offending method def. If the method information
bmtError->pThrowable = NULL;
bmtError->pModule = pModule;
bmtError->cl = cl;
bmtInternal->pInternalImport = pModule->GetMDImport();
bmtInternal->pModule = pModule;
bmtInternal->pParentMT = pParentMethodTable;
// Create the chain of bmtRTType for the parent types. This allows all imported
// parent methods to be associated with their declaring types, and as such it is
// easy to access the appropriate Substitution when comparing signatures.
bmtRTType * pParent = NULL;
if (pParentMethodTable != NULL)
{
Substitution * pParentSubst =
new (GetStackingAllocator()) Substitution(pModule, parentInst, NULL);
pParent = CreateTypeChain(pParentMethodTable, *pParentSubst);
}
// Now create the bmtMDType for the type being built.
bmtInternal->pType = new (GetStackingAllocator())
bmtMDType(pParent, pModule, cl, bmtGenericsInfo->typeContext);
// Put the interior stack probe after all the stack-allocated goop above. We compare our this pointer to the SP in
// the dtor to determine whether we are being called on an EH path or not.
INTERIOR_STACK_PROBE_FOR(GetThread(), 8);
// If not NULL, it means there are some by-value fields, and this contains an entry for each instance or static field
#ifdef _DEBUG
// Set debug class name string for easier debugging.
LPCUTF8 className;
LPCUTF8 nameSpace;
if (FAILED(GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace)))
{
className = nameSpace = "Invalid TypeDef record";
}
{
S_SIZE_T safeLen = S_SIZE_T(sizeof(char))*(S_SIZE_T(strlen(className)) + S_SIZE_T(strlen(nameSpace)) + S_SIZE_T(2));
if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW);
size_t len = safeLen.Value();
char *name = (char*) AllocateFromHighFrequencyHeap(safeLen);
strcpy_s(name, len, nameSpace);
if (strlen(nameSpace) > 0) {
name[strlen(nameSpace)] = '.';
name[strlen(nameSpace) + 1] = '\0';
}
strcat_s(name, len, className);
GetHalfBakedClass()->SetDebugClassName(name);
}
if (g_pConfig->ShouldBreakOnClassBuild(className))
{
CONSISTENCY_CHECK_MSGF(false, ("BreakOnClassBuild: typename '%s' ", className));
GetHalfBakedClass()->m_fDebuggingClass = TRUE;
}
LPCUTF8 pszDebugName,pszDebugNamespace;
if (FAILED(pModule->GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &pszDebugName, &pszDebugNamespace)))
{
pszDebugName = pszDebugNamespace = "Invalid TypeDef record";
}
StackSString debugName(SString::Utf8, pszDebugName);
// If there is an instantiation, update the debug name to include instantiation type names.
if (bmtGenerics->HasInstantiation())
{
StackSString debugName(SString::Utf8, GetDebugClassName());
TypeString::AppendInst(debugName, bmtGenerics->GetInstantiation(), TypeString::FormatBasic);
StackScratchBuffer buff;
const char* pDebugNameUTF8 = debugName.GetUTF8(buff);
S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8)) + S_SIZE_T(1);
if(safeLen.IsOverflow())
COMPlusThrowHR(COR_E_OVERFLOW);
size_t len = safeLen.Value();
char *name = (char*) AllocateFromLowFrequencyHeap(safeLen);
strcpy_s(name, len, pDebugNameUTF8);
GetHalfBakedClass()->SetDebugClassName(name);
pszDebugName = (LPCUTF8)name;
}
LOG((LF_CLASSLOADER, LL_INFO1000, "Loading class \"%s%s%S\" from module \"%ws\" in domain 0x%p %s\n",
*pszDebugNamespace ? pszDebugNamespace : "",
*pszDebugNamespace ? NAMESPACE_SEPARATOR_STR : "",
debugName.GetUnicode(),
pModule->GetDebugName(),
pModule->GetDomain(),
(pModule->IsSystem()) ? "System Domain" : ""
));
#endif // _DEBUG
// If this is mscorlib, then don't perform some sanity checks on the layout
bmtProp->fNoSanityChecks = ((g_pObjectClass == NULL) || pModule == g_pObjectClass->GetModule()) ||
#ifdef FEATURE_READYTORUN
// No sanity checks for ready-to-run compiled images if possible
(pModule->IsReadyToRun() && pModule->GetReadyToRunInfo()->SkipTypeValidation()) ||
#endif
// No sanity checks for real generic instantiations
!bmtGenerics->IsTypicalTypeDefinition();
// Interfaces have a parent class of Object, but we don't really want to inherit all of
// Object's virtual methods, so pretend we don't have a parent class - at the bottom of this
// function we reset the parent class
if (IsInterface())
{
bmtInternal->pType->SetParentType(NULL);
bmtInternal->pParentMT = NULL;
}
unsigned totalDeclaredFieldSize=0;
// Check to see if the class is a valuetype; but we don't want to mark System.Enum
// as a ValueType. To accomplish this, the check takes advantage of the fact
// that System.ValueType and System.Enum are loaded one immediately after the
// other in that order, and so if the parent MethodTable is System.ValueType and
// the System.Enum MethodTable is unset, then we must be building System.Enum and
// so we don't mark it as a ValueType.
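// Worked through: while System.Enum itself is being built, its parent is System.ValueType but
// g_pEnumClass is still NULL, so the check below fails and System.Enum is not marked as a value type.
// Ordinary structs (parent == System.ValueType once g_pEnumClass is set) and enums
// (parent == System.Enum) do satisfy the check.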
if(HasParent() &&
((g_pEnumClass != NULL && GetParentMethodTable() == g_pValueTypeClass) ||
GetParentMethodTable() == g_pEnumClass))
{
bmtProp->fIsValueClass = true;
HRESULT hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(),
g_CompilerServicesUnsafeValueTypeAttribute,
NULL, NULL);
IfFailThrow(hr);
if (hr == S_OK)
{
SetUnsafeValueClass();
}
hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(),
g_CompilerServicesIsByRefLikeAttribute,
NULL, NULL);
IfFailThrow(hr);
if (hr == S_OK)
{
bmtFP->fIsByRefLikeType = true;
}
}
// Check to see if the class is an enumeration. No fancy checks like the one immediately
// above for value types are necessary here.
if(HasParent() && GetParentMethodTable() == g_pEnumClass)
{
bmtProp->fIsEnum = true;
// Ensure we don't have generic enums, or at least enums that have a
// different number of type parameters from their enclosing class.
// The goal is to ensure that the enum's values can't depend on the
// type parameters in any way. And we don't see any need for an
// enum to have additional type parameters.
if (bmtGenerics->GetNumGenericArgs() != 0)
{
// Nested enums can have generic type parameters from their enclosing class.
// CLS rules require type parameters to be propagated to nested types.
// Note that class G<T> { enum E { } } will produce "G`1+E<T>".
// We want to disallow class G<T> { enum E<T, U> { } }
// Perhaps the IL equivalent of class G<T> { enum E { } } should be legal.
if (!IsNested())
{
BuildMethodTableThrowException(IDS_CLASSLOAD_ENUM_EXTRA_GENERIC_TYPE_PARAM);
}
mdTypeDef tdEnclosing = mdTypeDefNil;
HRESULT hr = GetMDImport()->GetNestedClassProps(GetCl(), &tdEnclosing);
if (FAILED(hr))
ThrowHR(hr, BFA_UNABLE_TO_GET_NESTED_PROPS);
HENUMInternalHolder hEnumGenericPars(GetMDImport());
if (FAILED(hEnumGenericPars.EnumInitNoThrow(mdtGenericParam, tdEnclosing)))
{
GetAssembly()->ThrowTypeLoadException(GetMDImport(), tdEnclosing, IDS_CLASSLOAD_BADFORMAT);
}
if (hEnumGenericPars.EnumGetCount() != bmtGenerics->GetNumGenericArgs())
{
BuildMethodTableThrowException(IDS_CLASSLOAD_ENUM_EXTRA_GENERIC_TYPE_PARAM);
}
}
}
#if defined(_TARGET_X86_) || defined(_TARGET_AMD64_) || defined(_TARGET_ARM64_)
if (GetModule()->IsSystem() && !bmtGenerics->HasInstantiation())
{
LPCUTF8 className;
LPCUTF8 nameSpace;
HRESULT hr = GetMDImport()->GetNameOfTypeDef(bmtInternal->pType->GetTypeDefToken(), &className, &nameSpace);
#if defined(_TARGET_ARM64_)
// All the functions in System.Runtime.Intrinsics.Arm.Arm64 are hardware intrinsics.
if (hr == S_OK && strcmp(nameSpace, "System.Runtime.Intrinsics.Arm.Arm64") == 0)
#else
// All the functions in System.Runtime.Intrinsics.X86 are hardware intrinsics.
if (hr == S_OK && strcmp(nameSpace, "System.Runtime.Intrinsics.X86") == 0)
#endif
{
if (IsCompilationProcess())
{
// Disable AOT compiling for managed implementation of hardware intrinsics in mscorlib.
// We specially treat them here to ensure correct ISA features are set during compilation
COMPlusThrow(kTypeLoadException, IDS_EE_HWINTRINSIC_NGEN_DISALLOWED);
}
bmtProp->fIsHardwareIntrinsic = true;
}
}
#endif
// If this type is marked by [Intrinsic] attribute, it may be specially treated by the runtime/compiler
// Currently, only SIMD types have [Intrinsic] attribute
//
// We check this here fairly early to ensure other downstream checks on these types can be slightly more efficient.
if (GetModule()->IsSystem() || GetAssembly()->IsSIMDVectorAssembly())
{
HRESULT hr = GetMDImport()->GetCustomAttributeByName(bmtInternal->pType->GetTypeDefToken(),
g_CompilerServicesIntrinsicAttribute,
NULL,
NULL);
if (hr == S_OK)
{
bmtProp->fIsIntrinsicType = true;
}
}
// Com Import classes are special. These types must derive from System.Object,
// and we then substitute the parent with System._ComObject.
if (IsComImport() && !IsEnum() && !IsInterface() && !IsValueClass() && !IsDelegate())
{
#ifdef FEATURE_COMINTEROP
// ComImport classes must either extend from Object or be a WinRT class
// that extends from another WinRT class (and so form a chain of WinRT classes
// that ultimately extend from object).
MethodTable* pMTParent = GetParentMethodTable();
if ((pMTParent == NULL) || !(
// is the parent valid?
(pMTParent == g_pObjectClass) ||
(GetHalfBakedClass()->IsProjectedFromWinRT() && pMTParent->IsProjectedFromWinRT())
))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_CANTEXTEND);
}
if (HasLayout())
{
// ComImport classes cannot have layout information.
BuildMethodTableThrowException(IDS_CLASSLOAD_COMIMPCANNOTHAVELAYOUT);
}
if (pMTParent == g_pObjectClass)
{
// ComImport classes ultimately extend from our __ComObject or RuntimeClass class
MethodTable *pCOMMT = NULL;
if (GetHalfBakedClass()->IsProjectedFromWinRT())
pCOMMT = g_pBaseRuntimeClass;
else
pCOMMT = g_pBaseCOMObject;
_ASSERTE(pCOMMT);
// We could have had COM interop classes derive from System._ComObject,
// but instead we have them derive from System.Object, have them set the
// ComImport bit in the type attributes, and then we swap out the parent
// type under the covers.
bmtInternal->pType->SetParentType(CreateTypeChain(pCOMMT, Substitution()));
bmtInternal->pParentMT = pCOMMT;
}
#endif
// if the current class is imported
bmtProp->fIsComObjectType = true;
}
#ifdef FEATURE_COMINTEROP
if (GetHalfBakedClass()->IsProjectedFromWinRT() && IsValueClass() && !IsEnum())
{
// WinRT structures must have sequential layout
if (!GetHalfBakedClass()->HasSequentialLayout())
{
BuildMethodTableThrowException(IDS_EE_STRUCTLAYOUT_WINRT);
}
}
// Check for special COM interop types.
CheckForSpecialTypes();
CheckForTypeEquivalence(cBuildingInterfaceList, pBuildingInterfaceList);
if (HasParent())
{ // Types that inherit from com object types are themselves com object types.
if (GetParentMethodTable()->IsComObjectType())
{
// if the parent class is of ComObjectType
// so is the child
bmtProp->fIsComObjectType = true;
}
#ifdef FEATURE_TYPEEQUIVALENCE
// If your parent is type equivalent then so are you
if (GetParentMethodTable()->HasTypeEquivalence())
{
bmtProp->fHasTypeEquivalence = true;
}
#endif
}
#endif // FEATURE_COMINTEROP
if (!HasParent() && !IsInterface())
{
if(g_pObjectClass != NULL)
{
if(!IsGlobalClass())
{
// Non-Object-derived types that are not the global class are prohibited by the spec
BuildMethodTableThrowException(IDS_CLASSLOAD_PARENTNULL);
}
}
}
// NOTE: This appears to be the earliest point during class loading that other classes MUST be loaded
// resolve unresolved interfaces, determine an upper bound on the size of the interface map,
// and determine the size of the largest interface (in # slots)
ResolveInterfaces(cBuildingInterfaceList, pBuildingInterfaceList);
// Enumerate this class's methodImpls
EnumerateMethodImpls();
// Enumerate this class's methods and fields
EnumerateClassMethods();
ValidateMethods();
EnumerateClassFields();
// Import the slots of the parent for use in placing this type's methods.
ImportParentMethods();
// This will allocate the working versions of the VTable and NonVTable in bmtVT
AllocateWorkingSlotTables();
// Allocate a MethodDesc* for each method (needed later when doing interfaces), and a FieldDesc* for each field
AllocateFieldDescs();
// Copy the parent's vtable into the current type's vtable
CopyParentVtable();
bmtVT->pDispatchMapBuilder = new (GetStackingAllocator()) DispatchMapBuilder(GetStackingAllocator());
// Determine vtable placement for each member in this class
PlaceVirtualMethods();
PlaceNonVirtualMethods();
// Allocate MethodDescs (expects methods to have been placed)
AllocAndInitMethodDescs();
if (IsInterface())
{
//
// We need to process/place method impls for default interface method overrides.
// We won't build dispatch map for interfaces, though.
//
ProcessMethodImpls();
PlaceMethodImpls();
}
else
{
//
// If we are a class, then there may be some unplaced vtable methods (which are by definition
// interface methods, otherwise they'd already have been placed). Place as many unplaced methods
// as possible, in the order preferred by interfaces. However, do not allow any duplicates - once
// a method has been placed, it cannot be placed again - if we are unable to neatly place an interface,
// create duplicate slots for it starting at dwCurrentDuplicateVtableSlot. Fill out the interface
// map for all interfaces as they are placed.
//
// If we are an interface, then all methods are already placed. Fill out the interface map for
// interfaces as they are placed.
//
ComputeInterfaceMapEquivalenceSet();
PlaceInterfaceMethods();
ProcessMethodImpls();
ProcessInexactMethodImpls();
PlaceMethodImpls();
if (!bmtProp->fNoSanityChecks)
{
// Now that interface method implementations have been fully resolved,
// we need to make sure that type constraints are also met.
ValidateInterfaceMethodConstraints();
}
}
// Verify that we have not overflowed the number of slots.
if (!FitsInU2((UINT64)bmtVT->pSlotTable->GetSlotCount()))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS);
}
// ensure we didn't overflow the temporary vtable
_ASSERTE(bmtVT->pSlotTable->GetSlotCount() <= bmtVT->dwMaxVtableSize);
// Allocate and initialize the dictionary for the type. This will be filled out later
// with the final values.
AllocAndInitDictionary();
////////////////////////////////////////////////////////////////////////////////////////////////
// Fields
//
// We decide here if we need a dynamic entry for our statics. We need it here because
// the offsets of our fields will depend on this. For the dynamic case (which requires
// an extra indirection through the MethodTable) we'll allocate the slot
// in SetupMethodTable2.
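// Example of the generic case (illustrative): G<int> and G<string> each get their own copies of
// G<T>'s static fields, so shared code cannot address them at a fixed module-wide offset; instead
// each instantiation gets a dynamic statics entry reached through its MethodTable.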
if (((pModule->IsReflection() || bmtGenerics->HasInstantiation() || !pModule->IsStaticStoragePrepared(cl)) &&
(bmtVT->GetClassCtorSlotIndex() != INVALID_SLOT_INDEX || bmtEnumFields->dwNumStaticFields !=0))
#ifdef EnC_SUPPORTED
// Classes in modules that have been edited (would do on class level if there were a
// way to tell if the class had been edited) also have dynamic statics as the number
// of statics might have changed, so can't use the static module-wide storage
|| (pModule->IsEditAndContinueEnabled() &&
((EditAndContinueModule*)pModule)->GetApplyChangesCount() > CorDB_DEFAULT_ENC_FUNCTION_VERSION)
#endif // EnC_SUPPORTED
)
{
// We will need a dynamic id
bmtProp->fDynamicStatics = true;
if (bmtGenerics->HasInstantiation())
{
bmtProp->fGenericsStatics = true;
}
}
// If not NULL, it means there are some by-value fields, and this contains an entry for each instance or static field,
// which is NULL if not a by value field, and points to the EEClass of the field if a by value field. Instance fields
// come first, statics come second.
MethodTable ** pByValueClassCache = NULL;
// Go thru all fields and initialize their FieldDescs.
InitializeFieldDescs(GetApproxFieldDescListRaw(), pLayoutRawFieldInfos, bmtInternal, bmtGenerics,
bmtMetaData, bmtEnumFields, bmtError,
&pByValueClassCache, bmtMFDescs, bmtFP, bmtCSInfo,
&totalDeclaredFieldSize);
// Place regular static fields
PlaceRegularStaticFields();
// Place thread static fields
PlaceThreadStaticFields();
LOG((LF_CODESHARING,
LL_INFO10000,
"Placing %d statics (%d handles) for class %s.\n",
GetNumStaticFields(), GetNumHandleRegularStatics() + GetNumHandleThreadStatics(),
pszDebugName));
if (IsBlittable() || IsManagedSequential())
{
bmtFP->NumGCPointerSeries = 0;
bmtFP->NumInstanceGCPointerFields = 0;
_ASSERTE(HasLayout());
bmtFP->NumInstanceFieldBytes = IsBlittable() ? GetLayoutInfo()->m_cbNativeSize
: GetLayoutInfo()->m_cbManagedSize;
// For simple Blittable types we still need to check if they have any overlapping
// fields and call the method SetHasOverLayedFields() when they are detected.
//
if (HasExplicitFieldOffsetLayout())
{
_ASSERTE(!bmtGenerics->fContainsGenericVariables); // A simple Blittable type can't ever be an open generic type.
HandleExplicitLayout(pByValueClassCache);
}
}
else
{
_ASSERTE(!IsBlittable());
// HandleExplicitLayout can fail for the GenericTypeDefinition even when
// it would succeed for some particular instantiations.
// Thus we only do explicit layout for real instantiations, e.g. C<int>, not
// the open types such as the GenericTypeDefinition C<!0> or any
// of the "fake" types involving generic type variables which are
// used for reflection and verification, e.g. C<List<!0>>.
//
if (!bmtGenerics->fContainsGenericVariables && HasExplicitFieldOffsetLayout())
{
HandleExplicitLayout(pByValueClassCache);
}
else
{
// Place instance fields
PlaceInstanceFields(pByValueClassCache);
}
}
if (CheckIfSIMDAndUpdateSize())
{
totalDeclaredFieldSize = bmtFP->NumInstanceFieldBytes;
}
// We enforce that all value classes have non-zero size
if (IsValueClass() && bmtFP->NumInstanceFieldBytes == 0)
{
BuildMethodTableThrowException(IDS_CLASSLOAD_ZEROSIZE);
}
if (bmtFP->fHasSelfReferencingStaticValueTypeField_WithRVA)
{ // Verify self-referencing statics with RVA (now when the ValueType size is known)
VerifySelfReferencingStaticValueTypeFields_WithRVA(pByValueClassCache);
}
// Now setup the method table
#ifdef FEATURE_PREJIT
Module *pComputedPZM = pLoaderModule;
if (bmtGenerics->GetNumGenericArgs() > 0)
{
pComputedPZM = Module::ComputePreferredZapModule(pModule, bmtGenerics->GetInstantiation());
}
SetupMethodTable2(pLoaderModule, pComputedPZM);
#else // FEATURE_PREJIT
SetupMethodTable2(pLoaderModule);
#endif // FEATURE_PREJIT
MethodTable * pMT = GetHalfBakedMethodTable();
#ifdef FEATURE_64BIT_ALIGNMENT
if (GetHalfBakedClass()->IsAlign8Candidate())
pMT->SetRequiresAlign8();
#endif
if (bmtGenerics->pVarianceInfo != NULL)
{
pMT->SetHasVariance();
}
if (bmtFP->NumRegularStaticGCBoxedFields != 0)
{
pMT->SetHasBoxedRegularStatics();
}
if (bmtFP->fIsByRefLikeType)
{
pMT->SetIsByRefLike();
}
if (IsValueClass())
{
if (bmtFP->NumInstanceFieldBytes != totalDeclaredFieldSize || HasOverLayedField())
GetHalfBakedClass()->SetIsNotTightlyPacked();
#ifdef FEATURE_HFA
GetHalfBakedClass()->CheckForHFA(pByValueClassCache);
#endif
#ifdef UNIX_AMD64_ABI
#ifdef FEATURE_HFA
#error Can't have FEATURE_HFA and UNIX_AMD64_ABI defined at the same time.
#endif // FEATURE_HFA
SystemVAmd64CheckForPassStructInRegister();
#endif // UNIX_AMD64_ABI
}
#ifdef UNIX_AMD64_ABI
#ifdef FEATURE_HFA
#error Can't have FEATURE_HFA and UNIX_AMD64_ABI defined at the same time.
#endif // FEATURE_HFA
if (HasLayout())
{
SystemVAmd64CheckForPassNativeStructInRegister();
}
#endif // UNIX_AMD64_ABI
#ifdef FEATURE_HFA
if (HasLayout())
{
GetHalfBakedClass()->CheckForNativeHFA();
}
#endif
#ifdef _DEBUG
pMT->SetDebugClassName(GetDebugClassName());
#endif
#ifdef FEATURE_COMINTEROP
if (IsInterface())
{
GetCoClassAttribInfo();
}
#endif // FEATURE_COMINTEROP
if (HasExplicitFieldOffsetLayout())
// Perform relevant GC calculations for types with explicit layout
HandleGCForExplicitLayout();
else
// Perform relevant GC calculations for value classes
HandleGCForValueClasses(pByValueClassCache);
// GC requires the series to be sorted.
// TODO: fix it so that we emit them in the correct order in the first place.
if (pMT->ContainsPointers())
{
CGCDesc* gcDesc = CGCDesc::GetCGCDescFromMT(pMT);
qsort(gcDesc->GetLowestSeries(), (int)gcDesc->GetNumSeries(), sizeof(CGCDescSeries), compareCGCDescSeries);
}
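// For illustration: compareCGCDescSeries orders the series by descending offset, so series
// starting at offsets { 8, 24, 16 } are stored as { 24, 16, 8 } after the qsort above.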
SetFinalizationSemantics();
// Allocate dynamic slot if necessary
if (bmtProp->fDynamicStatics)
{
if (bmtProp->fGenericsStatics)
{
FieldDesc* pStaticFieldDescs = NULL;
if (bmtEnumFields->dwNumStaticFields != 0)
{
pStaticFieldDescs = pMT->GetApproxFieldDescListRaw() + bmtEnumFields->dwNumInstanceFields;
}
pMT->SetupGenericsStaticsInfo(pStaticFieldDescs);
}
else
{
// Get an id for the dynamic class. We store it in the class because no class that is
// persisted in ngen should need one (i.e., an ngen'ed class does not take this path).
// The id is stored in an optional field, so we need to ensure an optional field descriptor has
// been allocated for this EEClass instance.
EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, pAllocator->GetLowFrequencyHeap());
SetModuleDynamicID(GetModule()->AllocateDynamicEntry(pMT));
}
}
//
// if there are context or thread statics, set the info in the method table optional members
//
// Check for the RemotingProxy Attribute
// structs with GC pointers MUST be pointer-size aligned because the GC assumes it
if (IsValueClass() && pMT->ContainsPointers() && (bmtFP->NumInstanceFieldBytes % TARGET_POINTER_SIZE != 0))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
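// For illustration: on a 64-bit target, an explicit-layout struct containing an object
// reference whose declared size works out to 12 bytes fails the check above, because the
// GC requires the total size of such a struct to be a multiple of TARGET_POINTER_SIZE.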
if (IsInterface())
{
// Reset parent class
pMT->SetParentMethodTable (g_pObjectClass);
}
#ifdef _DEBUG
// Reset the debug method names for BoxedEntryPointStubs
// so they reflect the very best debug information for the methods
{
DeclaredMethodIterator methIt(*this);
while (methIt.Next())
{
if (methIt->GetUnboxedMethodDesc() != NULL)
{
{
MethodDesc *pMD = methIt->GetUnboxedMethodDesc();
StackSString name(SString::Utf8);
TypeString::AppendMethodDebug(name, pMD);
StackScratchBuffer buff;
const char* pDebugNameUTF8 = name.GetUTF8(buff);
S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8)) + S_SIZE_T(1);
if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW);
size_t len = safeLen.Value();
pMD->m_pszDebugMethodName = (char*) AllocateFromLowFrequencyHeap(safeLen);
_ASSERTE(pMD->m_pszDebugMethodName);
strcpy_s((char *) pMD->m_pszDebugMethodName, len, pDebugNameUTF8);
}
{
MethodDesc *pMD = methIt->GetMethodDesc();
StackSString name(SString::Utf8);
TypeString::AppendMethodDebug(name, pMD);
StackScratchBuffer buff;
const char* pDebugNameUTF8 = name.GetUTF8(buff);
S_SIZE_T safeLen = S_SIZE_T(strlen(pDebugNameUTF8))+S_SIZE_T(1);
if(safeLen.IsOverflow()) COMPlusThrowHR(COR_E_OVERFLOW);
size_t len = safeLen.Value();
pMD->m_pszDebugMethodName = (char*) AllocateFromLowFrequencyHeap(safeLen);
_ASSERTE(pMD->m_pszDebugMethodName);
strcpy_s((char *) pMD->m_pszDebugMethodName, len, pDebugNameUTF8);
}
}
}
}
#endif // _DEBUG
//If this is a value type, then propagate the UnsafeValueTypeAttribute from
//its instance members to this type.
if (IsValueClass() && !IsUnsafeValueClass())
{
ApproxFieldDescIterator fields(GetHalfBakedMethodTable(),
ApproxFieldDescIterator::INSTANCE_FIELDS );
FieldDesc * current;
while (NULL != (current = fields.Next()))
{
CONSISTENCY_CHECK(!current->IsStatic());
if (current->GetFieldType() == ELEMENT_TYPE_VALUETYPE)
{
TypeHandle th = current->LookupApproxFieldTypeHandle();
CONSISTENCY_CHECK(!th.IsNull());
if (th.AsMethodTable()->GetClass()->IsUnsafeValueClass())
{
SetUnsafeValueClass();
break;
}
}
}
}
#ifdef FEATURE_ICASTABLE
if (!IsValueClass() && g_pICastableInterface != NULL && pMT->CanCastToInterface(g_pICastableInterface))
{
pMT->SetICastable();
}
#endif // FEATURE_ICASTABLE
// Grow the typedef ridmap in advance as we can't afford to
// fail once we set the resolve bit
pModule->EnsureTypeDefCanBeStored(bmtInternal->pType->GetTypeDefToken());
// Grow the tables in advance so that RID map filling cannot fail
// once we're past the commit point.
EnsureRIDMapsCanBeFilled();
{
// NOTE. NOTE!! the EEclass can now be accessed by other threads.
// Do NOT place any initialization after this point.
// You may NOT fail the call after this point.
FAULT_FORBID();
CANNOTTHROWCOMPLUSEXCEPTION();
/*
GetMemTracker()->SuppressRelease();
*/
}
#ifdef _DEBUG
if (g_pConfig->ShouldDumpOnClassLoad(pszDebugName))
{
LOG((LF_ALWAYS, LL_ALWAYS, "Method table summary for '%s':\n", pszDebugName));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of static fields: %d\n", bmtEnumFields->dwNumStaticFields));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of instance fields: %d\n", bmtEnumFields->dwNumInstanceFields));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of static obj ref fields: %d\n", bmtEnumFields->dwNumStaticObjRefFields));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of static boxed fields: %d\n", bmtEnumFields->dwNumStaticBoxedFields));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared fields: %d\n", NumDeclaredFields()));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared methods: %d\n", NumDeclaredMethods()));
LOG((LF_ALWAYS, LL_ALWAYS, "Number of declared non-abstract methods: %d\n", bmtMethod->dwNumDeclaredNonAbstractMethods));
pMT->Debug_DumpInterfaceMap("Approximate");
pMT->DebugDumpVtable(pszDebugName, FALSE);
pMT->DebugDumpFieldLayout(pszDebugName, FALSE);
pMT->DebugDumpGCDesc(pszDebugName, FALSE);
pMT->Debug_DumpDispatchMap();
}
#endif //_DEBUG
STRESS_LOG3(LF_CLASSLOADER, LL_INFO1000, "MethodTableBuilder: finished method table for module %p token %x = %pT \n",
pModule,
GetCl(),
GetHalfBakedMethodTable());
#ifdef MDA_SUPPORTED
MdaMarshaling* mda = MDA_GET_ASSISTANT(Marshaling);
if (mda && HasLayout())
{
FieldMarshaler *pFieldMarshaler = (FieldMarshaler*)GetLayoutInfo()->GetFieldMarshalers();
UINT numReferenceFields = GetLayoutInfo()->GetNumCTMFields();
while (numReferenceFields--)
{
mda->ReportFieldMarshal(pFieldMarshaler);
((BYTE*&)pFieldMarshaler) += MAXFIELDMARSHALERSIZE;
}
}
#endif // MDA_SUPPORTED
#ifdef FEATURE_PREJIT
_ASSERTE(pComputedPZM == Module::GetPreferredZapModuleForMethodTable(pMT));
#endif // FEATURE_PREJIT
END_INTERIOR_STACK_PROBE;
return GetHalfBakedMethodTable();
} // MethodTableBuilder::BuildMethodTableThrowing
#ifdef _PREFAST_
#pragma warning(pop)
#endif
//---------------------------------------------------------------------------------------
//
// Resolve unresolved interfaces, determine an upper bound on the size of the interface map.
//
VOID
MethodTableBuilder::ResolveInterfaces(
WORD cBuildingInterfaceList,
BuildingInterfaceInfo_t * pBuildingInterfaceList)
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(this));
PRECONDITION(CheckPointer(bmtAllocator));
PRECONDITION(CheckPointer(bmtInterface));
PRECONDITION(CheckPointer(bmtVT));
PRECONDITION(CheckPointer(bmtParent));
}
CONTRACTL_END;
// resolve unresolved interfaces and determine the size of the largest interface (in # slots)
LoadApproxInterfaceMap();
// Inherit parental slot counts
//@TODO: This doesn't belong here.
if (HasParent())
{
MethodTable * pParentClass = GetParentMethodTable();
PREFIX_ASSUME(pParentClass != NULL);
bmtParent->NumParentPointerSeries = pParentClass->ContainsPointers() ?
(DWORD)CGCDesc::GetCGCDescFromMT(pParentClass)->GetNumSeries() : 0;
if (pParentClass->HasFieldsWhichMustBeInited())
{
SetHasFieldsWhichMustBeInited();
}
#ifdef FEATURE_READYTORUN
if (!(IsValueClass() || (pParentClass == g_pObjectClass)))
{
CheckLayoutDependsOnOtherModules(pParentClass);
}
#endif
}
else
{
bmtParent->NumParentPointerSeries = 0;
}
} // MethodTableBuilder::ResolveInterfaces
//*******************************************************************************
/* static */
int __cdecl MethodTableBuilder::bmtMetaDataInfo::MethodImplTokenPair::Compare(
const void *elem1,
const void *elem2)
{
STATIC_CONTRACT_LEAF;
MethodImplTokenPair *e1 = (MethodImplTokenPair *)elem1;
MethodImplTokenPair *e2 = (MethodImplTokenPair *)elem2;
if (e1->methodBody < e2->methodBody) return -1;
else if (e1->methodBody > e2->methodBody) return 1;
else if (e1->methodDecl < e2->methodDecl) return -1;
else if (e1->methodDecl > e2->methodDecl) return 1;
else return 0;
}
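// For illustration: sorting with this comparator orders the pairs first by methodBody and
// then by methodDecl, so identical (methodBody, methodDecl) pairs, e.g. two copies of
// (0x06000002, 0x0A000005), end up adjacent and can be collapsed by the duplicate
// elimination pass in EnumerateMethodImpls.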
//*******************************************************************************
/* static */
BOOL MethodTableBuilder::bmtMetaDataInfo::MethodImplTokenPair::Equal(
const MethodImplTokenPair *elem1,
const MethodImplTokenPair *elem2)
{
STATIC_CONTRACT_LEAF;
return ((elem1->methodBody == elem2->methodBody) &&
(elem1->methodDecl == elem2->methodDecl));
}
//*******************************************************************************
VOID
MethodTableBuilder::EnumerateMethodImpls()
{
STANDARD_VM_CONTRACT;
HRESULT hr = S_OK;
IMDInternalImport * pMDInternalImport = GetMDImport();
DWORD rid, maxRidMD, maxRidMR;
HENUMInternalMethodImplHolder hEnumMethodImpl(pMDInternalImport);
hr = hEnumMethodImpl.EnumMethodImplInitNoThrow(GetCl());
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, *bmtError);
}
// This gets the count out of the metadata interface.
bmtMethod->dwNumberMethodImpls = hEnumMethodImpl.EnumMethodImplGetCount();
bmtMethod->dwNumberInexactMethodImplCandidates = 0;
// This is the first pass. In this we will simply enumerate the token pairs and fill in
// the data structures. In addition, we'll sort the list and eliminate duplicates.
if (bmtMethod->dwNumberMethodImpls > 0)
{
//
// Allocate the structures to keep track of the token pairs
//
bmtMetaData->rgMethodImplTokens = new (GetStackingAllocator())
bmtMetaDataInfo::MethodImplTokenPair[bmtMethod->dwNumberMethodImpls];
// Iterate through each MethodImpl declared on this class
for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls; i++)
{
hr = hEnumMethodImpl.EnumMethodImplNext(
&bmtMetaData->rgMethodImplTokens[i].methodBody,
&bmtMetaData->rgMethodImplTokens[i].methodDecl);
bmtMetaData->rgMethodImplTokens[i].fConsiderDuringInexactMethodImplProcessing = false;
bmtMetaData->rgMethodImplTokens[i].fThrowIfUnmatchedDuringInexactMethodImplProcessing = false;
bmtMetaData->rgMethodImplTokens[i].interfaceEquivalenceSet = 0;
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, *bmtError);
}
// Grab the next set of body/decl tokens
if (hr == S_FALSE)
{
// In the odd case that the enumerator fails before we've reached the total reported
// entries, let's reset the count and just break out. (Should we throw?)
bmtMethod->dwNumberMethodImpls = i;
break;
}
}
// No need to do any sorting or duplicate elimination unless there are two or more methodImpls
if (bmtMethod->dwNumberMethodImpls > 1)
{
// Now sort
qsort(bmtMetaData->rgMethodImplTokens,
bmtMethod->dwNumberMethodImpls,
sizeof(bmtMetaDataInfo::MethodImplTokenPair),
&bmtMetaDataInfo::MethodImplTokenPair::Compare);
// Now eliminate duplicates
for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls - 1; i++)
{
CONSISTENCY_CHECK((i + 1) < bmtMethod->dwNumberMethodImpls);
bmtMetaDataInfo::MethodImplTokenPair *e1 = &bmtMetaData->rgMethodImplTokens[i];
bmtMetaDataInfo::MethodImplTokenPair *e2 = &bmtMetaData->rgMethodImplTokens[i + 1];
// If the two entries are equal, eliminate the first one and reduce the total count by one.
if (bmtMetaDataInfo::MethodImplTokenPair::Equal(e1, e2))
{
DWORD dwCopyNum = bmtMethod->dwNumberMethodImpls - (i + 1);
memcpy(e1, e2, dwCopyNum * sizeof(bmtMetaDataInfo::MethodImplTokenPair));
bmtMethod->dwNumberMethodImpls--;
CONSISTENCY_CHECK(bmtMethod->dwNumberMethodImpls > 0);
}
}
}
}
if (bmtMethod->dwNumberMethodImpls != 0)
{
//
// Allocate the structures to keep track of the impl matches
//
bmtMetaData->pMethodDeclSubsts = new (GetStackingAllocator())
Substitution[bmtMethod->dwNumberMethodImpls];
// These are used for verification
maxRidMD = pMDInternalImport->GetCountWithTokenKind(mdtMethodDef);
maxRidMR = pMDInternalImport->GetCountWithTokenKind(mdtMemberRef);
// Iterate through each MethodImpl declared on this class
for (DWORD i = 0; i < bmtMethod->dwNumberMethodImpls; i++)
{
PCCOR_SIGNATURE pSigDecl = NULL;
PCCOR_SIGNATURE pSigBody = NULL;
ULONG cbSigDecl;
ULONG cbSigBody;
mdToken tkParent;
mdToken theBody, theDecl;
Substitution theDeclSubst(GetModule(), SigPointer(), NULL); // this can get updated later below.
theBody = bmtMetaData->rgMethodImplTokens[i].methodBody;
theDecl = bmtMetaData->rgMethodImplTokens[i].methodDecl;
// IMPLEMENTATION LIMITATION: currently, we require that the body of a methodImpl
// belong to the current type. This is because we need to allocate a different
// type of MethodDesc for bodies that are part of methodImpls.
if (TypeFromToken(theBody) != mdtMethodDef)
{
hr = FindMethodDeclarationForMethodImpl(
theBody,
&theBody,
TRUE);
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, IDS_CLASSLOAD_MI_ILLEGAL_BODY, mdMethodDefNil);
}
// Make sure to update the stored token with the resolved token.
bmtMetaData->rgMethodImplTokens[i].methodBody = theBody;
}
if (TypeFromToken(theBody) != mdtMethodDef)
{
BuildMethodTableThrowException(BFA_METHODDECL_NOT_A_METHODDEF);
}
CONSISTENCY_CHECK(theBody == bmtMetaData->rgMethodImplTokens[i].methodBody);
//
// Now that the tokens of Decl and Body are obtained, do the MD validation
//
rid = RidFromToken(theDecl);
// Perform initial rudimentary validation of the token. Full token verification
// will be done in TestMethodImpl when placing the methodImpls.
if (TypeFromToken(theDecl) == mdtMethodDef)
{
// Decl must be valid token
if ((rid == 0) || (rid > maxRidMD))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL);
}
// Get signature and length
if (FAILED(pMDInternalImport->GetSigOfMethodDef(theDecl, &cbSigDecl, &pSigDecl)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
}
// The token is not a MethodDef (likely a MemberRef)
else
{
// Decl must be valid token
if ((TypeFromToken(theDecl) != mdtMemberRef) || (rid == 0) || (rid > maxRidMR))
{
bmtError->resIDWhy = IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL;
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_DECL);
}
// Get signature and length
LPCSTR szDeclName;
if (FAILED(pMDInternalImport->GetNameAndSigOfMemberRef(theDecl, &pSigDecl, &cbSigDecl, &szDeclName)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
// Get parent
hr = pMDInternalImport->GetParentToken(theDecl,&tkParent);
if (FAILED(hr))
BuildMethodTableThrowException(hr, *bmtError);
theDeclSubst = Substitution(tkParent, GetModule(), NULL);
}
// Perform initial rudimentary validation of the token. Full token verification
// will be done in TestMethodImpl when placing the methodImpls.
{
// Body must be valid token
rid = RidFromToken(theBody);
if ((rid == 0)||(rid > maxRidMD))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_TOKEN_BODY);
}
// Body's parent must be this class
hr = pMDInternalImport->GetParentToken(theBody,&tkParent);
if (FAILED(hr))
BuildMethodTableThrowException(hr, *bmtError);
if(tkParent != GetCl())
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_ILLEGAL_BODY);
}
}
// Decl's and Body's signatures must match
if(pSigDecl && cbSigDecl)
{
if (FAILED(pMDInternalImport->GetSigOfMethodDef(theBody, &cbSigBody, &pSigBody)) ||
(pSigBody == NULL) ||
(cbSigBody == 0))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MISSING_SIG_BODY);
}
// Can't use memcmp because there may be two AssemblyRefs
// in this scope, pointing to the same assembly, etc.
if (!MetaSig::CompareMethodSigs(
pSigDecl,
cbSigDecl,
GetModule(),
&theDeclSubst,
pSigBody,
cbSigBody,
GetModule(),
NULL))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_BODY_DECL_MISMATCH);
}
}
else
{
BuildMethodTableThrowException(IDS_CLASSLOAD_MI_MISSING_SIG_DECL);
}
bmtMetaData->pMethodDeclSubsts[i] = theDeclSubst;
}
}
} // MethodTableBuilder::EnumerateMethodImpls
//*******************************************************************************
//
// Find a method declaration that must reside in the scope passed in. This method cannot be called if
// the reference travels to another scope.
//
// Protect against finding a declaration that lives within
// us (the type being created)
//
HRESULT MethodTableBuilder::FindMethodDeclarationForMethodImpl(
mdToken pToken, // Token that is being located (MemberRef or MemberDef)
mdToken* pDeclaration, // [OUT] Method definition for Member
BOOL fSameClass) // Does the declaration need to be in this class
{
STANDARD_VM_CONTRACT;
HRESULT hr = S_OK;
IMDInternalImport *pMDInternalImport = GetMDImport();
PCCOR_SIGNATURE pSig; // Signature of Member
DWORD cSig;
LPCUTF8 szMember = NULL;
// The token should be a MemberRef or MethodDef. If it is a ref, then it should resolve
// back to this type.
if(TypeFromToken(pToken) == mdtMemberRef)
{
// Get the parent
mdToken typeref;
if (FAILED(pMDInternalImport->GetParentOfMemberRef(pToken, &typeref)))
{
BAD_FORMAT_NOTHROW_ASSERT(!"Invalid MemberRef record");
IfFailRet(COR_E_TYPELOAD);
}
GOTPARENT:
if (TypeFromToken(typeref) == mdtMethodDef)
{ // If parent is a method def then this is a varargs method
mdTypeDef typeDef;
hr = pMDInternalImport->GetParentToken(typeref, &typeDef);
if (TypeFromToken(typeDef) != mdtTypeDef)
{ // A mdtMethodDef must be parented by a mdtTypeDef
BAD_FORMAT_NOTHROW_ASSERT(!"MethodDef without TypeDef as Parent");
IfFailRet(COR_E_TYPELOAD);
}
BAD_FORMAT_NOTHROW_ASSERT(typeDef == GetCl());
// This is the real method we are overriding
*pDeclaration = typeref;
}
else if (TypeFromToken(typeref) == mdtTypeSpec)
{ // Added so that method impls can refer to instantiated interfaces or classes
if (FAILED(pMDInternalImport->GetSigFromToken(typeref, &cSig, &pSig)))
{
BAD_FORMAT_NOTHROW_ASSERT(!"Invalid TypeSpec record");
IfFailRet(COR_E_TYPELOAD);
}
CorElementType elemType = (CorElementType) *pSig++;
if (elemType == ELEMENT_TYPE_GENERICINST)
{ // If this is a generic inst, we expect that the next elem is ELEMENT_TYPE_CLASS,
// which is handled in the case below.
elemType = (CorElementType) *pSig++;
BAD_FORMAT_NOTHROW_ASSERT(elemType == ELEMENT_TYPE_CLASS);
}
if (elemType == ELEMENT_TYPE_CLASS)
{ // This covers E_T_GENERICINST and E_T_CLASS typespec formats. We don't expect
// any other kinds to come through here.
CorSigUncompressToken(pSig, &typeref);
}
else
{ // This is an unrecognized signature format.
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT,
IDS_CLASSLOAD_MI_BAD_SIG,
mdMethodDefNil);
}
goto GOTPARENT;
}
else
{ // Verify that the ref points back to us
mdToken tkDef = mdTokenNil;
if (TypeFromToken(typeref) == mdtTypeRef)
{ // We only get here when we know the token does not reference a type in a different scope.
LPCUTF8 pszNameSpace;
LPCUTF8 pszClassName;
if (FAILED(pMDInternalImport->GetNameOfTypeRef(typeref, &pszNameSpace, &pszClassName)))
{
IfFailRet(COR_E_TYPELOAD);
}
mdToken tkRes;
if (FAILED(pMDInternalImport->GetResolutionScopeOfTypeRef(typeref, &tkRes)))
{
IfFailRet(COR_E_TYPELOAD);
}
hr = pMDInternalImport->FindTypeDef(pszNameSpace,
pszClassName,
(TypeFromToken(tkRes) == mdtTypeRef) ? tkRes : mdTokenNil,
&tkDef);
if (FAILED(hr))
{
IfFailRet(COR_E_TYPELOAD);
}
}
else if (TypeFromToken(typeref) == mdtTypeDef)
{ // We get a typedef when the parent of the token is a typespec to the type.
tkDef = typeref;
}
else
{
CONSISTENCY_CHECK_MSGF(FALSE, ("Invalid methodimpl signature in class %s.", GetDebugClassName()));
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT,
IDS_CLASSLOAD_MI_BAD_SIG,
mdMethodDefNil);
}
if (fSameClass && tkDef != GetCl())
{ // If we required that the typedef be the same type as the current class,
// and it doesn't match, we need to return a failure result.
IfFailRet(COR_E_TYPELOAD);
}
IfFailRet(pMDInternalImport->GetNameAndSigOfMemberRef(pToken, &pSig, &cSig, &szMember));
if (isCallConv(
MetaSig::GetCallingConvention(GetModule(), Signature(pSig, cSig)),
IMAGE_CEE_CS_CALLCONV_FIELD))
{
return VLDTR_E_MR_BADCALLINGCONV;
}
hr = pMDInternalImport->FindMethodDef(
tkDef, szMember, pSig, cSig, pDeclaration);
IfFailRet(hr);
}
}
else if (TypeFromToken(pToken) == mdtMethodDef)
{
mdTypeDef typeDef;
// Verify that we are the parent
hr = pMDInternalImport->GetParentToken(pToken, &typeDef);
IfFailRet(hr);
if(typeDef != GetCl())
{
IfFailRet(COR_E_TYPELOAD);
}
*pDeclaration = pToken;
}
else
{
IfFailRet(COR_E_TYPELOAD);
}
return hr;
}
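// For illustration: in a generic class C<T>, a methodimpl body may be recorded as a MemberRef
// whose parent is a TypeSpec for C<!0>; the TypeSpec signature (GENERICINST CLASS <token> ...)
// is unwound via the GOTPARENT loop above back to the TypeDef for C, and FindMethodDef then
// resolves the member by name and signature to the MethodDef that actually carries the body.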
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:21000) // Suppress PREFast warning about overly large function
#endif // _PREFAST_
//---------------------------------------------------------------------------------------
//
// Used by BuildMethodTable
//
// Enumerate this class's members
//
VOID
MethodTableBuilder::EnumerateClassMethods()
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(bmtInternal));
PRECONDITION(CheckPointer(bmtEnumFields));
PRECONDITION(CheckPointer(bmtMFDescs));
PRECONDITION(CheckPointer(bmtProp));
PRECONDITION(CheckPointer(bmtMetaData));
PRECONDITION(CheckPointer(bmtVT));
PRECONDITION(CheckPointer(bmtError));
}
CONTRACTL_END;
HRESULT hr = S_OK;
DWORD i;
IMDInternalImport *pMDInternalImport = GetMDImport();
mdToken tok;
DWORD dwMemberAttrs;
BOOL fIsClassEnum = IsEnum();
BOOL fIsClassInterface = IsInterface();
BOOL fIsClassValueType = IsValueClass();
BOOL fIsClassComImport = IsComImport();
BOOL fIsClassNotAbstract = (IsTdAbstract(GetAttrClass()) == 0);
PCCOR_SIGNATURE pMemberSignature;
ULONG cMemberSignature;
//
// Run through the method list and calculate the following:
// # methods.
// # "other" methods (i.e. static or private)
// # non-other methods
//
HENUMInternalHolder hEnumMethod(pMDInternalImport);
hr = hEnumMethod.EnumInitNoThrow(mdtMethodDef, GetCl());
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, *bmtError);
}
// Allocate an array to contain the method tokens as well as information about the methods.
DWORD cMethAndGaps = hEnumMethod.EnumGetCount();
if ((DWORD)MAX_SLOT_INDEX <= cMethAndGaps)
BuildMethodTableThrowException(IDS_CLASSLOAD_TOO_MANY_METHODS);
bmtMethod->m_cMaxDeclaredMethods = (SLOT_INDEX)cMethAndGaps;
bmtMethod->m_cDeclaredMethods = 0;
bmtMethod->m_rgDeclaredMethods = new (GetStackingAllocator())
bmtMDMethod *[bmtMethod->m_cMaxDeclaredMethods];
enum { SeenCtor = 1, SeenInvoke = 2, SeenBeginInvoke = 4, SeenEndInvoke = 8};
unsigned delegateMethodsSeen = 0;
for (i = 0; i < cMethAndGaps; i++)
{
ULONG dwMethodRVA;
DWORD dwImplFlags;
METHOD_TYPE type;
METHOD_IMPL_TYPE implType;
LPSTR strMethodName;
#ifdef FEATURE_TYPEEQUIVALENCE
// TypeEquivalent structs must not have methods
if (bmtProp->fIsTypeEquivalent && fIsClassValueType)
{
BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTMETHODS);
}
#endif
//
// Go to the next method and retrieve its attributes.
//
hEnumMethod.EnumNext(&tok);
DWORD rid = RidFromToken(tok);
if ((rid == 0)||(rid > pMDInternalImport->GetCountWithTokenKind(mdtMethodDef)))
{
BuildMethodTableThrowException(BFA_METHOD_TOKEN_OUT_OF_RANGE);
}
if (FAILED(pMDInternalImport->GetMethodDefProps(tok, &dwMemberAttrs)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
if (IsMdRTSpecialName(dwMemberAttrs) || IsMdVirtual(dwMemberAttrs) || IsDelegate())
{
if (FAILED(pMDInternalImport->GetNameOfMethodDef(tok, (LPCSTR *)&strMethodName)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
if(IsStrLongerThan(strMethodName,MAX_CLASS_NAME))
{
BuildMethodTableThrowException(BFA_METHOD_NAME_TOO_LONG);
}
}
else
{
strMethodName = NULL;
}
DWORD numGenericMethodArgs = 0;
{
HENUMInternalHolder hEnumTyPars(pMDInternalImport);
hr = hEnumTyPars.EnumInitNoThrow(mdtGenericParam, tok);
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, *bmtError);
}
numGenericMethodArgs = hEnumTyPars.EnumGetCount();
// We do not want to support context-bound objects with generic methods.
if (numGenericMethodArgs != 0)
{
HENUMInternalHolder hEnumGenericPars(pMDInternalImport);
hEnumGenericPars.EnumInit(mdtGenericParam, tok);
for (unsigned methIdx = 0; methIdx < numGenericMethodArgs; methIdx++)
{
mdGenericParam tkTyPar;
pMDInternalImport->EnumNext(&hEnumGenericPars, &tkTyPar);
DWORD flags;
if (FAILED(pMDInternalImport->GetGenericParamProps(tkTyPar, NULL, &flags, NULL, NULL, NULL)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
if (0 != (flags & ~(gpVarianceMask | gpSpecialConstraintMask)))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
switch (flags & gpVarianceMask)
{
case gpNonVariant:
break;
case gpCovariant: // intentional fallthru
case gpContravariant:
BuildMethodTableThrowException(VLDTR_E_GP_ILLEGAL_VARIANT_MVAR);
break;
default:
BuildMethodTableThrowException(IDS_CLASSLOAD_BADFORMAT);
}
}
}
}
//
// We need to check if there are any gaps in the vtable. These are
// represented by methods with the mdRTSpecialName flag and a name of the form
// _VtblGap_nnn (to represent nnn empty slots) or _VtblGap (to represent a
// single empty slot).
//
if (IsMdRTSpecialName(dwMemberAttrs))
{
PREFIX_ASSUME(strMethodName != NULL); // if we've gotten here we've called GetNameOfMethodDef
// The slot is special, but it might not be a vtable spacer. To
// determine that we must look at the name.
if (strncmp(strMethodName, "_VtblGap", 8) == 0)
{
//
// This slot doesn't really exist, don't add it to the method
// table. Instead it represents one or more empty slots, encoded
// in the method name. Locate the beginning of the count in the
// name. There are these points to consider:
// There may be no count present at all (in which case the
// count is taken as one).
// There may be an additional count just after Gap but before
// the '_'. We ignore this.
//
LPCSTR pos = strMethodName + 8;
// Skip optional number.
while (IS_DIGIT(*pos))
pos++;
WORD n = 0;
// Check for presence of count.
if (*pos == '\0')
n = 1;
else
{
if (*pos != '_')
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT,
IDS_CLASSLOAD_BADSPECIALMETHOD,
tok);
}
// Skip '_'.
pos++;
// Read count.
bool fReadAtLeastOneDigit = false;
while (IS_DIGIT(*pos))
{
_ASSERTE(n < 6552);
n *= 10;
n += DIGIT_TO_INT(*pos);
pos++;
fReadAtLeastOneDigit = true;
}
// Check for end of name.
if (*pos != '\0' || !fReadAtLeastOneDigit)
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT,
IDS_CLASSLOAD_BADSPECIALMETHOD,
tok);
}
}
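// For illustration: a name of plain "_VtblGap" (or "_VtblGap7", where the trailing digits are
// a sequence number that is skipped) encodes a single empty slot, while "_VtblGap7_12"
// encodes 12 empty slots taken from the count after the '_'.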
#ifdef FEATURE_COMINTEROP
// Record vtable gap in mapping list. The map is an optional field, so ensure we've allocated
// these fields first.
EnsureOptionalFieldsAreAllocated(GetHalfBakedClass(), m_pAllocMemTracker, GetLoaderAllocator()->GetLowFrequencyHeap());
if (GetHalfBakedClass()->GetSparseCOMInteropVTableMap() == NULL)
GetHalfBakedClass()->SetSparseCOMInteropVTableMap(new SparseVTableMap());
GetHalfBakedClass()->GetSparseCOMInteropVTableMap()->RecordGap((WORD)NumDeclaredMethods(), n);
bmtProp->fSparse = true;
#endif // FEATURE_COMINTEROP
continue;
}
}
//
// This is a real method so add it to the enumeration of methods. We now need to retrieve
// information on the method and store it for later use.
//
if (FAILED(pMDInternalImport->GetMethodImplProps(tok, &dwMethodRVA, &dwImplFlags)))
{
BuildMethodTableThrowException(
COR_E_BADIMAGEFORMAT,
IDS_CLASSLOAD_BADSPECIALMETHOD,
tok);
}
//
// But first - minimal flags validity checks
//
// No methods in Enums!
if (fIsClassEnum)
{
BuildMethodTableThrowException(BFA_METHOD_IN_A_ENUM);
}
// Check the RVA: methods of a ComImport class, abstract methods, runtime-implemented methods and internal calls must have an RVA of 0
if (dwMethodRVA != 0)
{
if(fIsClassComImport)
{
BuildMethodTableThrowException(BFA_METHOD_WITH_NONZERO_RVA);
}
if(IsMdAbstract(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_ABSTRACT_METHOD_WITH_RVA);
}
if(IsMiRuntime(dwImplFlags))
{
BuildMethodTableThrowException(BFA_RUNTIME_METHOD_WITH_RVA);
}
if(IsMiInternalCall(dwImplFlags))
{
BuildMethodTableThrowException(BFA_INTERNAL_METHOD_WITH_RVA);
}
}
// Abstract / not abstract
if(IsMdAbstract(dwMemberAttrs))
{
if(fIsClassNotAbstract)
{
BuildMethodTableThrowException(BFA_AB_METHOD_IN_AB_CLASS);
}
if(!IsMdVirtual(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_NONVIRT_AB_METHOD);
}
}
else if(fIsClassInterface)
{
if (IsMdRTSpecialName(dwMemberAttrs))
{
CONSISTENCY_CHECK(CheckPointer(strMethodName));
if (strcmp(strMethodName, COR_CCTOR_METHOD_NAME))
{
BuildMethodTableThrowException(BFA_NONAB_NONCCTOR_METHOD_ON_INT);
}
}
}
// Virtual / not virtual
if(IsMdVirtual(dwMemberAttrs))
{
if(IsMdPinvokeImpl(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_VIRTUAL_PINVOKE_METHOD);
}
if(IsMdStatic(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_VIRTUAL_STATIC_METHOD);
}
if(strMethodName && (0==strcmp(strMethodName, COR_CTOR_METHOD_NAME)))
{
BuildMethodTableThrowException(BFA_VIRTUAL_INSTANCE_CTOR);
}
}
#ifndef FEATURE_DEFAULT_INTERFACES
// Some interface checks.
if (fIsClassInterface)
{
if (IsMdVirtual(dwMemberAttrs))
{
if (!IsMdAbstract(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_VIRTUAL_NONAB_INT_METHOD);
}
}
else
{
// Instance field/method
if (!IsMdStatic(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_NONVIRT_INST_INT_METHOD);
}
}
}
#endif
// No synchronized methods in ValueTypes
if(fIsClassValueType && IsMiSynchronized(dwImplFlags))
{
BuildMethodTableThrowException(BFA_SYNC_METHOD_IN_VT);
}
// Global methods:
if(IsGlobalClass())
{
if(!IsMdStatic(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_NONSTATIC_GLOBAL_METHOD);
}
if (strMethodName) //<TODO>@todo: investigate mc++ generating null name</TODO>
{
if(0==strcmp(strMethodName, COR_CTOR_METHOD_NAME))
{
BuildMethodTableThrowException(BFA_GLOBAL_INST_CTOR);
}
}
}
//@GENERICS:
// Generic methods, and methods in generic classes,
// may not be members of a COM Import class (except for WinRT), PInvoke methods, or internal calls outside mscorlib.
if ((bmtGenerics->GetNumGenericArgs() != 0 || numGenericMethodArgs != 0) &&
(
#ifdef FEATURE_COMINTEROP
fIsClassComImport ||
bmtProp->fComEventItfType ||
#endif // FEATURE_COMINTEROP
IsMdPinvokeImpl(dwMemberAttrs) ||
(IsMiInternalCall(dwImplFlags) && !GetModule()->IsSystem())))
{
#ifdef FEATURE_COMINTEROP
if (!GetHalfBakedClass()->IsProjectedFromWinRT())
#endif // FEATURE_COMINTEROP
{
BuildMethodTableThrowException(BFA_BAD_PLACE_FOR_GENERIC_METHOD);
}
}
// Generic methods may not be marked "runtime". Note, however, that methods in generic
// delegate classes are, so we do not apply this restriction to methods of generic
// classes in general.
if (numGenericMethodArgs != 0 && IsMiRuntime(dwImplFlags))
{
BuildMethodTableThrowException(BFA_GENERIC_METHOD_RUNTIME_IMPL);
}
// Signature validation
if (FAILED(pMDInternalImport->GetSigOfMethodDef(tok, &cMemberSignature, &pMemberSignature)))
{
BuildMethodTableThrowException(hr, BFA_BAD_SIGNATURE, mdMethodDefNil);
}
hr = validateTokenSig(tok,pMemberSignature,cMemberSignature,dwMemberAttrs,pMDInternalImport);
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, BFA_BAD_SIGNATURE, mdMethodDefNil);
}
// Check the appearance of covariant and contravariant in the method signature
// Note that variance is only supported for interfaces
if (bmtGenerics->pVarianceInfo != NULL)
{
SigPointer sp(pMemberSignature, cMemberSignature);
ULONG callConv;
IfFailThrow(sp.GetCallingConvInfo(&callConv));
if (callConv & IMAGE_CEE_CS_CALLCONV_GENERIC)
IfFailThrow(sp.GetData(NULL));
DWORD numArgs;
IfFailThrow(sp.GetData(&numArgs));
// Return type behaves covariantly
if (!EEClass::CheckVarianceInSig(
bmtGenerics->GetNumGenericArgs(),
bmtGenerics->pVarianceInfo,
GetModule(),
sp,
gpCovariant))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_VARIANCE_IN_METHOD_RESULT, tok);
}
IfFailThrow(sp.SkipExactlyOne());
for (DWORD j = 0; j < numArgs; j++)
{
// Argument types behave contravariantly
if (!EEClass::CheckVarianceInSig(bmtGenerics->GetNumGenericArgs(),
bmtGenerics->pVarianceInfo,
GetModule(),
sp,
gpContravariant))
{
BuildMethodTableThrowException(IDS_CLASSLOAD_VARIANCE_IN_METHOD_ARG, tok);
}
IfFailThrow(sp.SkipExactlyOne());
}
}
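// For illustration: on a contravariant interface I<in T>, a method "void M(T arg)" passes
// these checks (T appears in an argument position, which is checked contravariantly), whereas
// "T M()" fails with IDS_CLASSLOAD_VARIANCE_IN_METHOD_RESULT because the return type
// position is checked covariantly.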
//
// Determine the method's type
//
if (IsReallyMdPinvokeImpl(dwMemberAttrs) || IsMiInternalCall(dwImplFlags))
{
hr = NDirect::HasNAT_LAttribute(pMDInternalImport, tok, dwMemberAttrs);
// There was a problem querying for the attribute
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, IDS_CLASSLOAD_BADPINVOKE, tok);
}
// The attribute is not present
if (hr == S_FALSE)
{
#ifdef FEATURE_COMINTEROP
if (fIsClassComImport
|| GetHalfBakedClass()->IsProjectedFromWinRT()
|| bmtProp->fComEventItfType
)
{
// ComImport classes have methods which are just used
// for implementing all interfaces the class supports
type = METHOD_TYPE_COMINTEROP;
// constructor is special
if (IsMdRTSpecialName(dwMemberAttrs))
{
// Note: Method name (.ctor) will be checked in code:ValidateMethods
// WinRT ctors are interop calls via stubs
if (!GetHalfBakedClass()->IsProjectedFromWinRT())
{
// Ctor on a non-WinRT class
type = METHOD_TYPE_FCALL;
}
}
}
else
#endif //FEATURE_COMINTEROP
if (dwMethodRVA == 0)
{
type = METHOD_TYPE_FCALL;
}
else
{
type = METHOD_TYPE_NDIRECT;
}
}
// The NAT_L attribute is present, marking this method as NDirect
else
{
CONSISTENCY_CHECK(hr == S_OK);
type = METHOD_TYPE_NDIRECT;
}
}
else if (IsMiRuntime(dwImplFlags))
{
// currently the only runtime implemented functions are delegate instance methods
if (!IsDelegate() || IsMdStatic(dwMemberAttrs) || IsMdAbstract(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_BAD_RUNTIME_IMPL);
}
unsigned newDelegateMethodSeen = 0;
if (IsMdRTSpecialName(dwMemberAttrs)) // .ctor
{
if (strcmp(strMethodName, COR_CTOR_METHOD_NAME) != 0 || IsMdVirtual(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_BAD_FLAGS_ON_DELEGATE);
}
newDelegateMethodSeen = SeenCtor;
type = METHOD_TYPE_FCALL;
}
else
{
if (strcmp(strMethodName, "Invoke") == 0)
newDelegateMethodSeen = SeenInvoke;
else if (strcmp(strMethodName, "BeginInvoke") == 0)
newDelegateMethodSeen = SeenBeginInvoke;
else if (strcmp(strMethodName, "EndInvoke") == 0)
newDelegateMethodSeen = SeenEndInvoke;
else
{
BuildMethodTableThrowException(BFA_UNKNOWN_DELEGATE_METHOD);
}
type = METHOD_TYPE_EEIMPL;
}
// If we get here we have either set newDelegateMethodSeen or we have thrown a BMT exception
_ASSERTE(newDelegateMethodSeen != 0);
if ((delegateMethodsSeen & newDelegateMethodSeen) != 0)
{
BuildMethodTableThrowException(BFA_DUPLICATE_DELEGATE_METHOD);
}
delegateMethodsSeen |= newDelegateMethodSeen;
}
else if (numGenericMethodArgs != 0)
{
//We use an instantiated method desc to represent a generic method
type = METHOD_TYPE_INSTANTIATED;
}
else if (fIsClassInterface)
{
#ifdef FEATURE_COMINTEROP
if (IsMdStatic(dwMemberAttrs))
{
// Static methods in interfaces need nothing special.
type = METHOD_TYPE_NORMAL;
}
else if (bmtGenerics->GetNumGenericArgs() != 0 &&
(bmtGenerics->fSharedByGenericInstantiations || (!bmtProp->fIsRedirectedInterface && !GetHalfBakedClass()->IsProjectedFromWinRT())))
{
// Methods in instantiated interfaces need nothing special - they are not visible from COM etc.
// mcComInterop is only useful for unshared instantiated WinRT interfaces. If the interface is
// shared by multiple instantiations, the MD would be useless for interop anyway.
type = METHOD_TYPE_NORMAL;
}
else if (bmtProp->fIsMngStandardItf)
{
// If the interface is a standard managed interface then allocate space for an FCall method desc.
type = METHOD_TYPE_FCALL;
}
else if (IsMdAbstract(dwMemberAttrs))
{
// If COM interop is supported then all other interface MDs may be
// accessed via COM interop. mcComInterop MDs have an additional
// pointer-sized field pointing to COM interop data which are
// allocated lazily when/if the MD actually gets used for interop.
type = METHOD_TYPE_COMINTEROP;
}
else
#endif // FEATURE_COMINTEROP
{
// This codepath is used by remoting
type = METHOD_TYPE_NORMAL;
}
}
else
{
type = METHOD_TYPE_NORMAL;
}
// PInvoke methods are not permitted on collectible types
if ((type == METHOD_TYPE_NDIRECT) && GetAssembly()->IsCollectible())
{
BuildMethodTableThrowException(IDS_CLASSLOAD_COLLECTIBLEPINVOKE);
}
// Generic methods should always be METHOD_TYPE_INSTANTIATED
if ((numGenericMethodArgs != 0) && (type != METHOD_TYPE_INSTANTIATED))
{
BuildMethodTableThrowException(BFA_GENERIC_METHODS_INST);
}
// Count how many overrides this method provides. All method bodies are defined
// on this type, so we can just compare the token with the body tokens recorded
// for the overrides.
implType = METHOD_IMPL_NOT;
for (DWORD impls = 0; impls < bmtMethod->dwNumberMethodImpls; impls++)
{
if (bmtMetaData->rgMethodImplTokens[impls].methodBody == tok)
{
implType = METHOD_IMPL;
break;
}
}
// For delegates we don't allow any non-runtime implemented bodies
// for any of the four special methods
if (IsDelegate() && !IsMiRuntime(dwImplFlags))
{
if ((strcmp(strMethodName, COR_CTOR_METHOD_NAME) == 0) ||
(strcmp(strMethodName, "Invoke") == 0) ||
(strcmp(strMethodName, "BeginInvoke") == 0) ||
(strcmp(strMethodName, "EndInvoke") == 0) )
{
BuildMethodTableThrowException(BFA_ILLEGAL_DELEGATE_METHOD);
}
}
//
// Create a new bmtMDMethod representing this method and add it to the
// declared method list.
//
bmtMDMethod * pNewMethod = new (GetStackingAllocator()) bmtMDMethod(
bmtInternal->pType,
tok,
dwMemberAttrs,
dwImplFlags,
dwMethodRVA,
type,
implType);
bmtMethod->AddDeclaredMethod(pNewMethod);
//
// Update the count of the various types of methods.
//
bmtVT->dwMaxVtableSize++;
// Increment the number of non-abstract declared methods
if (!IsMdAbstract(dwMemberAttrs))
{
bmtMethod->dwNumDeclaredNonAbstractMethods++;
}
}
// Check to see that we have all of the required delegate methods (ECMA 13.6 Delegates)
if (IsDelegate())
{
// Do we have all four special delegate methods
// or just the two special delegate methods
if ((delegateMethodsSeen != (SeenCtor | SeenInvoke | SeenBeginInvoke | SeenEndInvoke)) &&
(delegateMethodsSeen != (SeenCtor | SeenInvoke)) )
{
BuildMethodTableThrowException(BFA_MISSING_DELEGATE_METHOD);
}
}
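// For illustration: a delegate declared in C# as "delegate int D(string s)" is typically
// emitted with exactly .ctor, Invoke, BeginInvoke and EndInvoke, all runtime-implemented, so
// delegateMethodsSeen ends up as (SeenCtor | SeenInvoke | SeenBeginInvoke | SeenEndInvoke);
// a delegate emitted with only .ctor and Invoke is also accepted by the check above.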
if (i != cMethAndGaps)
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_METHOD_COUNT, mdTokenNil);
}
#ifdef FEATURE_COMINTEROP
//
// If the interface is sparse, we need to finalize the mapping list by
// telling it how many real methods we found.
//
if (bmtProp->fSparse)
{
GetHalfBakedClass()->GetSparseCOMInteropVTableMap()->FinalizeMapping(NumDeclaredMethods());
}
#endif // FEATURE_COMINTEROP
} // MethodTableBuilder::EnumerateClassMethods
#ifdef _PREFAST_
#pragma warning(pop)
#endif
//*******************************************************************************
//
// Run through the field list and calculate the following:
// # static fields
// # static fields that contain object refs.
// # instance fields
//
VOID
MethodTableBuilder::EnumerateClassFields()
{
STANDARD_VM_CONTRACT;
HRESULT hr = S_OK;
DWORD i;
IMDInternalImport *pMDInternalImport = GetMDImport();
mdToken tok;
DWORD dwMemberAttrs;
bmtEnumFields->dwNumStaticFields = 0;
bmtEnumFields->dwNumStaticObjRefFields = 0;
bmtEnumFields->dwNumStaticBoxedFields = 0;
bmtEnumFields->dwNumThreadStaticFields = 0;
bmtEnumFields->dwNumThreadStaticObjRefFields = 0;
bmtEnumFields->dwNumThreadStaticBoxedFields = 0;
bmtEnumFields->dwNumInstanceFields = 0;
HENUMInternalHolder hEnumField(pMDInternalImport);
hr = hEnumField.EnumInitNoThrow(mdtFieldDef, GetCl());
if (FAILED(hr))
{
BuildMethodTableThrowException(hr, *bmtError);
}
bmtMetaData->cFields = hEnumField.EnumGetCount();
// Retrieve the fields and store them in a temp array.
bmtMetaData->pFields = new (GetStackingAllocator()) mdToken[bmtMetaData->cFields];
bmtMetaData->pFieldAttrs = new (GetStackingAllocator()) DWORD[bmtMetaData->cFields];
DWORD dwFieldLiteralInitOnly = fdLiteral | fdInitOnly;
DWORD dwMaxFieldDefRid = pMDInternalImport->GetCountWithTokenKind(mdtFieldDef);
for (i = 0; hEnumField.EnumNext(&tok); i++)
{
//
// Retrieve the attributes of the field.
//
DWORD rid = RidFromToken(tok);
if ((rid == 0)||(rid > dwMaxFieldDefRid))
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, BFA_BAD_FIELD_TOKEN, mdTokenNil);
}
if (FAILED(pMDInternalImport->GetFieldDefProps(tok, &dwMemberAttrs)))
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, BFA_BAD_FIELD_TOKEN, tok);
}
//
// Store the field and its attributes in the bmtMetaData structure for later use.
//
bmtMetaData->pFields[i] = tok;
bmtMetaData->pFieldAttrs[i] = dwMemberAttrs;
if((dwMemberAttrs & fdFieldAccessMask)==fdFieldAccessMask)
{
BuildMethodTableThrowException(BFA_INVALID_FIELD_ACC_FLAGS);
}
if((dwMemberAttrs & dwFieldLiteralInitOnly)==dwFieldLiteralInitOnly)
{
BuildMethodTableThrowException(BFA_FIELD_LITERAL_AND_INIT);
}
// can only have static global fields
if(IsGlobalClass())
{
if(!IsFdStatic(dwMemberAttrs))
{
BuildMethodTableThrowException(BFA_NONSTATIC_GLOBAL_FIELD);
}
}
//
// Update the count of the various types of fields.
//
if (IsFdStatic(dwMemberAttrs))
{
if (!IsFdLiteral(dwMemberAttrs))
{
#ifdef FEATURE_TYPEEQUIVALENCE
if (bmtProp->fIsTypeEquivalent)
{
BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTFIELDS);
}
#endif
bmtEnumFields->dwNumStaticFields++;
// If this static field is thread static, then we need
// to increment bmtEnumFields->dwNumThreadStaticFields
hr = pMDInternalImport->GetCustomAttributeByName(tok,
g_ThreadStaticAttributeClassName,
NULL, NULL);
IfFailThrow(hr);
if (hr == S_OK)
{
// It's a thread static, so increment the count
bmtEnumFields->dwNumThreadStaticFields++;
}
}
}
else
{
#ifdef FEATURE_TYPEEQUIVALENCE
if (!IsFdPublic(dwMemberAttrs) && bmtProp->fIsTypeEquivalent)
{
BuildMethodTableThrowException(IDS_CLASSLOAD_EQUIVALENTSTRUCTFIELDS);
}
#endif
if (!IsFdLiteral(dwMemberAttrs))
{
bmtEnumFields->dwNumInstanceFields++;
}
if(IsInterface())
{
BuildMethodTableThrowException(BFA_INSTANCE_FIELD_IN_INT);
}
}
}
if (i != bmtMetaData->cFields)
{
BuildMethodTableThrowException(COR_E_BADIMAGEFORMAT, IDS_CLASSLOAD_BAD_FIELD_COUNT, mdTokenNil);
}
if(IsEnum() && (bmtEnumFields->dwNumInstanceFields==0))
{
BuildMethodTableThrowException(BFA_INSTANCE_FIELD_IN_ENUM);
}
bmtEnumFields->dwNumDeclaredFields = bmtEnumFields->dwNumStaticFields + bmtEnumFields->dwNumInstanceFields;
}
//*******************************************************************************
//
// Used by BuildMethodTable
//
// Determines the maximum size of the vtable and allocates the temporary storage arrays
// Also copies the parent's vtable into the working vtable.
//
VOID MethodTableBuilder::AllocateWorkingSlotTables()
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(this));
PRECONDITION(CheckPointer(bmtAllocator));
PRECONDITION(CheckPointer(bmtMFDescs));
PRECONDITION(CheckPointer(bmtMetaData));
PRECONDITION(CheckPointer(bmtVT));
PRECONDITION(CheckPointer(bmtEnumFields));
PRECONDITION(CheckPointer(bmtInterface));
PRECONDITION(CheckPointer(bmtFP));
PRECONDITION(CheckPointer(bmtParent));
}
CONTRACTL_END;
// Allocate a FieldDesc* for each field
bmtMFDescs->ppFieldDescList = new (GetStackingAllocator()) FieldDesc*[bmtMetaData->cFields];
ZeroMemory(bmtMFDescs->ppFieldDescList, bmtMetaData->cFields * sizeof(FieldDesc *));
// Create a temporary function table (we don't know how large the vtable will be until the very end,
// since we don't yet know how many declared methods are overrides vs. newslots).
if (IsValueClass())
{ // ValueClass virtuals are converted into non-virtual methods and the virtual slots
// become unboxing stubs that forward to these new non-virtual methods. This has the
// side effect of doubling the number of slots introduced by newslot virtuals.
bmtVT->dwMaxVtableSize += NumDeclaredMethods();
}
_ASSERTE(!HasParent() || (bmtInterface->dwInterfaceMapSize - GetParentMethodTable()->GetNumInterfaces()) >= 0);
if (HasParent())
{ // Add parent vtable size. <TODO> This should actually be the parent's virtual method count. </TODO>
bmtVT->dwMaxVtableSize += bmtParent->pSlotTable->GetSlotCount();
}
S_SLOT_INDEX cMaxSlots = AsClrSafeInt(bmtVT->dwMaxVtableSize) + AsClrSafeInt(NumDeclaredMethods());
if (cMaxSlots.IsOverflow() || MAX_SLOT_INDEX < cMaxSlots.Value())
cMaxSlots = S_SLOT_INDEX(MAX_SLOT_INDEX);
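// For illustration: cMaxSlots is just a working upper bound: parent slot count plus declared
// methods (counted twice for value classes to cover the unboxing stubs) plus NumDeclaredMethods()
// of headroom; if the sum overflows or exceeds MAX_SLOT_INDEX it is simply clamped here, and the
// real slot-count limit is enforced later by the FitsInU2 check in BuildMethodTableThrowing.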
// Allocate the temporary vtable
bmtVT->pSlotTable = new (GetStackingAllocator())
bmtMethodSlotTable(cMaxSlots.Value(), GetStackingAllocator());
if (HasParent())
{
#if 0
// @<TODO>todo: Figure out the right way to override Equals for value
// types only.
//
// This is broken because
// (a) g_pObjectClass->FindMethod("Equals", &gsig_IM_Obj_RetBool); will return
// the EqualsValue method
// (b) When mscorlib has been preloaded (and thus the munge already done
// ahead of time), we cannot easily find both methods
// to compute EqualsAddr & EqualsSlot
//
// For now, the Equals method has a runtime check to see if it's
// comparing value types.
//</TODO>
// If it is a value type, override a few of the base class methods.
if (IsValueClass())
{
static WORD EqualsSlot;
// If we haven't been through here yet, get some stuff from the Object class definition.
if (EqualsSlot == NULL)
{
// Get the slot of the Equals method.
MethodDesc *pEqualsMD = g_pObjectClass->FindMethod("Equals", &gsig_IM_Obj_RetBool);
THROW_BAD_FORMAT_MAYBE(pEqualsMD != NULL, 0, this);
EqualsSlot = pEqualsMD->GetSlot();
// Get the address of the EqualsValue method.
MethodDesc *pEqualsValueMD = g_pObjectClass->FindMethod("EqualsValue", &gsig_IM_Obj_RetBool);
THROW_BAD_FORMAT_MAYBE(pEqualsValueMD != NULL, 0, this);
// Patch the EqualsValue method desc in a dangerous way to
// look like the Equals method desc.
pEqualsValueMD->SetSlot(EqualsSlot);
pEqualsValueMD->SetMemberDef(pEqualsMD->GetMemberDef());
}
// Override the valuetype "Equals" with "EqualsValue".
bmtVT->SetMethodDescForSlot(EqualsSlot, EqualsSlot);
}
#endif // 0
}
S_UINT32 cEntries = S_UINT32(2) * S_UINT32(NumDeclaredMethods());
if (cEntries.IsOverflow())
{
ThrowHR(COR_E_OVERFLOW);
}
}
//*******************************************************************************
//
// Used by BuildMethodTable
//
// Allocate a MethodDesc* for each method (needed later when doing interfaces), and a FieldDesc* for each field
//
VOID MethodTableBuilder::AllocateFieldDescs()
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(this));
PRECONDITION(CheckPointer(bmtAllocator));
PRECONDITION(CheckPointer(bmtMFDescs));
PRECONDITION(CheckPointer(bmtMetaData));
PRECONDITION(CheckPointer(bmtVT));
PRECONDITION(CheckPointer(bmtEnumFields));
PRECONDITION(CheckPointer(bmtFP));
PRECONDITION(CheckPointer(bmtParent));
}
CONTRACTL_END;
// We'll be counting the # fields of each size as we go along
for (DWORD i = 0; i <= MAX_LOG2_PRIMITIVE_FIELD_SIZE; i++)
{
bmtFP->NumRegularStaticFieldsOfSize[i] = 0;
bmtFP->NumThreadStaticFieldsOfSize[i] = 0;
bmtFP->NumInstanceFieldsOfSize[i] = 0;
}
//
// Allocate blocks of MethodDescs and FieldDescs for all declared methods and fields
//
// In order to avoid allocating a field pointing back to the method
// table in every single method desc, we allocate memory in the
// following manner:
// o Field descs get a single contiguous block.
// o Method descs of different sizes (normal vs NDirect) are
// allocated in different MethodDescChunks.
// o Each method desc chunk starts with a header, and has
// at most MAX_ method descs (if there are more
// method descs of a given size, multiple chunks are allocated).
// This way method descs can use an 8-bit offset field to locate the
// pointer to their method table.
//
/////////////////////////////////////////////////////////////////
// Allocate fields
if (NumDeclaredFields() > 0)
{
GetHalfBakedClass()->SetFieldDescList((FieldDesc *)
AllocateFromHighFrequencyHeap(S_SIZE_T(NumDeclaredFields()) * S_SIZE_T(sizeof(FieldDesc))));
INDEBUG(GetClassLoader()->m_dwDebugFieldDescs += NumDeclaredFields();)
INDEBUG(GetClassLoader()->m_dwFieldDescData += (NumDeclaredFields() * sizeof(FieldDesc));)
}
}
#ifdef FEATURE_DOUBLE_ALIGNMENT_HINT
//*******************************************************************************
//
// Heuristic to determine if instances of this class should be 8-byte aligned
//
BOOL MethodTableBuilder::ShouldAlign8(DWORD dwR8Fields, DWORD dwTotalFields)
{
LIMITED_METHOD_CONTRACT;
return dwR8Fields*2>dwTotalFields && dwR8Fields>=2;
}
#endif
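// For illustration: a struct with two double fields out of three total fields satisfies the
// heuristic above (2*2 > 3 and 2 >= 2), while a struct with a single double never does, since
// at least two R8 fields are required before the 8-byte alignment hint is applied.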
//*******************************************************************************
BOOL MethodTableBuilder::IsSelfReferencingStaticValueTypeField(mdToken dwByValueClassToken,
bmtInternalInfo* bmtInternal,
const bmtGenericsInfo *bmtGenerics,
PCCOR_SIGNATURE pMemberSignature,
DWORD cMemberSignature)
{
STANDARD_VM_CONTRACT;
if (dwByValueClassToken != this->GetCl())
{
return FALSE;
}
if (!bmtGenerics->HasInstantiation())
{
return TRUE;
}
// The value class is generic. Check that the signature of the field
// is _exactly_ equivalent to VC<!0, !1, !2, ...>. Do this by consing up a fake
// signature.
DWORD nGenericArgs = bmtGenerics->GetNumGenericArgs();
CONSISTENCY_CHECK(nGenericArgs != 0);
SigBuilder sigBuilder;
sigBuilder.AppendElementType(ELEMENT_TYPE_GENERICINST);
sigBuilder.AppendElementType(ELEMENT_TYPE_VALUETYPE);
sigBuilder.AppendToken(dwByValueClassToken);
sigBuilder.AppendData(nGenericArgs);
for (unsigned int typearg = 0; typearg < nGenericArgs; typearg++)
{
sigBuilder.AppendElementType(ELEMENT_TYPE_VAR);
sigBuilder.AppendData(typearg);
}
DWORD cFakeSig;
PCCOR_SIGNATURE pFakeSig = (PCCOR_SIGNATURE)sigBuilder.GetSignature(&cFakeSig);
PCCOR_SIGNATURE pFieldSig = pMemberSignature + 1; // skip the CALLCONV_FIELD
return MetaSig::CompareElementType(pFakeSig, pFieldSig,
pFakeSig + cFakeSig, pMemberSignature + cMemberSignature,
GetModule(), GetModule(),
NULL, NULL);
}
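// For illustration: for "struct S<T, U> { static S<T, U> s; }" the fake signature built above
// is GENERICINST VALUETYPE <token of S> 2 VAR 0 VAR 1, which compares equal to the field's
// signature, so the field is treated as self-referencing; a field typed S<int, U> would not
// match and the method returns FALSE.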
//*******************************************************************************
//
// Value used in the pByValueClass cache to mark self-references
//
static BOOL IsSelfRef(MethodTable * pMT)
{
return pMT == (MethodTable *)-1;
}
//*******************************************************************************
//
// Used by BuildMethodTable
//
// Go thru all fields and initialize their FieldDescs.
//
#ifdef _PREFAST_
#pragma warning(push)
#pragma warning(disable:21000) // Suppress PREFast warning about overly large function
#endif // _PREFAST_
VOID MethodTableBuilder::InitializeFieldDescs(FieldDesc *pFieldDescList,
const LayoutRawFieldInfo* pLayoutRawFieldInfos,
bmtInternalInfo* bmtInternal,
const bmtGenericsInfo* bmtGenerics,
bmtMetaDataInfo* bmtMetaData,
bmtEnumFieldInfo* bmtEnumFields,
bmtErrorInfo* bmtError,
MethodTable *** pByValueClassCache,
bmtMethAndFieldDescs* bmtMFDescs,
bmtFieldPlacement* bmtFP,
bmtContextStaticInfo* pbmtCSInfo,
unsigned* totalDeclaredSize)
{
CONTRACTL
{
STANDARD_VM_CHECK;
PRECONDITION(CheckPointer(this));
PRECONDITION(CheckPointer(bmtInternal));
PRECONDITION(CheckPointer(bmtGenerics));
PRECONDITION(CheckPointer(bmtMetaData));
PRECONDITION(CheckPointer(bmtEnumFields));
PRECONDITION(CheckPointer(bmtError));
PRECONDITION(CheckPointer(pByValueClassCache));
PRECONDITION(CheckPointer(bmtMFDescs));
PRECONDITION(CheckPointer(bmtFP));
PRECONDITION(CheckPointer(totalDeclaredSize));
}
CONTRACTL_END;
DWORD i;
IMDInternalImport * pInternalImport = GetMDImport(); // to avoid multiple dereferencings
FieldMarshaler * pNextFieldMarshaler = NULL;
if (HasLayout())
{
pNextFieldMarshaler = (FieldMarshaler*)(GetLayoutInfo()->GetFieldMarshalers());
}
//========================================================================
// BEGIN:
// Go thru all fields and initialize their FieldDescs.
//========================================================================
DWORD dwCurrentDeclaredField = 0;
DWORD dwCurrentStaticField = 0;
DWORD dwCurrentThreadStaticField = 0;
DWORD dwR8Fields = 0; // Number of R8's the class has
#ifdef FEATURE_64BIT_ALIGNMENT
// Track whether any field in this type requires 8-byte alignment
BOOL fFieldRequiresAlign8 = HasParent() ? GetParentMethodTable()->RequiresAlign8() : FALSE;
#endif
for (i = 0; i < bmtMetaData->cFields; i++)
{
PCCOR_SIGNATURE pMemberSignature;
DWORD cMemberSignature;
DWORD dwMemberAttrs;
dwMemberAttrs = bmtMetaData->pFieldAttrs[i];
BOOL fIsStatic = IsFdStatic(dwMemberAttrs);
// We don't store static final primitive fields in the class layout
if (IsFdLiteral(dwMemberAttrs))
continue;
if (!IsFdPublic(dwMemberAttrs))
SetHasNonPublicFields();
if (IsFdNotSerialized(dwMemberAttrs))
SetCannotBeBlittedByObjectCloner();
IfFailThrow(pInternalImport->GetSigOfFieldDef(bmtMetaData->pFields[i], &cMemberSignature, &pMemberSignature));
// Signature validation
IfFailThrow(validateTokenSig(bmtMetaData->pFields[i],pMemberSignature,cMemberSignature,dwMemberAttrs,pInternalImport));
FieldDesc * pFD;
DWORD dwLog2FieldSize = 0;
BOOL bCurrentFieldIsGCPointer = FALSE;
mdToken dwByValueClassToken = 0;
MethodTable * pByValueClass = NULL;
BOOL fIsByValue = FALSE;
BOOL fIsThreadStatic = FALSE;
static const BOOL fIsContextStatic = FALSE;
BOOL fHasRVA = FALSE;
MetaSig fsig(pMemberSignature,
cMemberSignature,
GetModule(),
&bmtGenerics->typeContext,
MetaSig::sigField);
CorElementType ElementType = fsig.NextArg();
// Get type
if (!isCallConv(fsig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_FIELD))
{
IfFailThrow(COR_E_TYPELOAD);
}
// Determine if a static field is special i.e. RVA based, local to
// a thread or a context
if (fIsStatic)
{
if (IsFdHasFieldRVA(dwMemberAttrs))
{
fHasRVA = TRUE;
}
HRESULT hr;
hr = pInternalImport->GetCustomAttributeByName(bmtMetaData->pFields[i],
g_ThreadStaticAttributeClassName,
NULL, NULL);
IfFailThrow(hr);
if (hr == S_OK)
{
fIsThreadStatic = TRUE;
}
if (ElementType == ELEMENT_TYPE_VALUETYPE)
{
hr = pInternalImport->GetCustomAttributeByName(bmtMetaData->pFields[i],
g_CompilerServicesFixedAddressValueTypeAttribute,
NULL, NULL);
IfFailThrow(hr);
if (hr == S_OK)
{
bmtFP->fHasFixedAddressValueTypes = true;
}
}
// Do some sanity checks that we are not mixing context and thread
// relative statics.
if (fHasRVA && (fIsThreadStatic || fIsContextStatic))
{
IfFailThrow(COR_E_TYPELOAD);
}
if ((fIsThreadStatic || fIsContextStatic || bmtFP->fHasFixedAddressValueTypes) && GetAssembly()->IsCollectible())
{
if (bmtFP->fHasFixedAddressValueTypes)
{
BuildMethodTableThrowException(IDS_CLASSLOAD_COLLECTIBLEFIXEDVTATTR);
}
BuildMethodTableThrowException(IDS_CLASSLOAD_COLLECTIBLESPECIALSTATICS);
}
}
GOT_ELEMENT_TYPE:
// Type to store in FieldDesc - we don't want to have extra case statements for
// ELEMENT_TYPE_STRING, SDARRAY etc., so we convert all object types to CLASS.
// Also, BOOLEAN and CHAR are converted to U1 and U2.
CorElementType FieldDescElementType = ElementType;
switch (ElementType)
{
case ELEMENT_TYPE_I1:
case ELEMENT_TYPE_U1:
{
dwLog2FieldSize = 0;
break;
}
case ELEMENT_TYPE_I2:
case ELEMENT_TYPE_U2:
{
dwLog2FieldSize = 1;
break;
}
case ELEMENT_TYPE_I4:
case ELEMENT_TYPE_U4:
IN_TARGET_32BIT(case ELEMENT_TYPE_I:)
IN_TARGET_32BIT(case ELEMENT_TYPE_U:)
case ELEMENT_TYPE_R4:
{
dwLog2FieldSize = 2;
break;
}
case ELEMENT_TYPE_BOOLEAN:
{
// FieldDescElementType = ELEMENT_TYPE_U1;
dwLog2FieldSize = 0;
break;
}
case ELEMENT_TYPE_CHAR: