diff --git a/src/vm/amd64/umthunkstub.S b/src/vm/amd64/umthunkstub.S
index e388f154908e..3e60bedb3f26 100644
--- a/src/vm/amd64/umthunkstub.S
+++ b/src/vm/amd64/umthunkstub.S
@@ -83,7 +83,7 @@ LOCAL_LABEL(HaveThread):
 
     mov r12, rax // r12 <- Thread*
 
-    //FailFast if a native callable method invoked via ldftn and calli.
+    //FailFast if a native callable method is invoked via ldftn and calli.
     cmp dword ptr [r12 + OFFSETOF__Thread__m_fPreemptiveGCDisabled], 1
     jz LOCAL_LABEL(InvalidTransition)
 
diff --git a/src/vm/i386/asmconstants.h b/src/vm/i386/asmconstants.h
index 5fd39d6897af..c42b167f3203 100644
--- a/src/vm/i386/asmconstants.h
+++ b/src/vm/i386/asmconstants.h
@@ -449,6 +449,24 @@ ASMCONSTANTS_C_ASSERT(CallDescrData__fpReturnSize == offsetof(CallDescrD
 ASMCONSTANTS_C_ASSERT(CallDescrData__pTarget == offsetof(CallDescrData, pTarget))
 ASMCONSTANTS_C_ASSERT(CallDescrData__returnValue == offsetof(CallDescrData, returnValue))
 
+#define UMEntryThunk__m_pUMThunkMarshInfo 0x0C
+ASMCONSTANTS_C_ASSERT(UMEntryThunk__m_pUMThunkMarshInfo == offsetof(UMEntryThunk, m_pUMThunkMarshInfo))
+
+#define UMEntryThunk__m_dwDomainId 0x10
+ASMCONSTANTS_C_ASSERT(UMEntryThunk__m_dwDomainId == offsetof(UMEntryThunk, m_dwDomainId))
+
+#define UMThunkMarshInfo__m_pILStub 0x00
+ASMCONSTANTS_C_ASSERT(UMThunkMarshInfo__m_pILStub == offsetof(UMThunkMarshInfo, m_pILStub))
+
+#define UMThunkMarshInfo__m_cbActualArgSize 0x04
+ASMCONSTANTS_C_ASSERT(UMThunkMarshInfo__m_cbActualArgSize == offsetof(UMThunkMarshInfo, m_cbActualArgSize))
+
+#ifndef CROSSGEN_COMPILE
+#define Thread__m_pDomain 0x14
+ASMCONSTANTS_C_ASSERT(Thread__m_pDomain == offsetof(Thread, m_pDomain))
+
+#endif
+
 #undef ASMCONSTANTS_C_ASSERT
 #undef ASMCONSTANTS_RUNTIME_ASSERT
 
diff --git a/src/vm/i386/umthunkstub.S b/src/vm/i386/umthunkstub.S
index 2bc6fb702fd8..728964bdb604 100644
--- a/src/vm/i386/umthunkstub.S
+++ b/src/vm/i386/umthunkstub.S
@@ -14,9 +14,10 @@ NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
     push ecx
     push edx
-    push eax
+    push eax // UMEntryThunk*
     call C_FUNC(TheUMEntryPrestubWorker)
     pop edx
+    // eax = PCODE
 
     // Restore argument registers
     pop edx
@@ -25,8 +26,152 @@ NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
     jmp eax // Tail Jmp
 NESTED_END TheUMEntryPrestub, _TEXT
 
-
+//
+// eax: UMEntryThunk*
+//
 NESTED_ENTRY UMThunkStub, _TEXT, UnhandledExceptionHandlerUnix
-    int 3 // implement here
+
+#define UMThunkStub_SAVEDREG (3*4) // ebx, esi, edi
+#define UMThunkStub_LOCALVARS (2*4) // UMEntryThunk*, Thread*
+#define UMThunkStub_INT_ARG_SPILL (2*4) // spill slots for ecx, edx
+#define UMThunkStub_UMENTRYTHUNK_OFFSET (UMThunkStub_SAVEDREG+4)
+#define UMThunkStub_THREAD_OFFSET (UMThunkStub_UMENTRYTHUNK_OFFSET+4)
+#define UMThunkStub_INT_ARG_OFFSET (UMThunkStub_THREAD_OFFSET+4)
+#define UMThunkStub_FIXEDALLOCSIZE (UMThunkStub_LOCALVARS+UMThunkStub_INT_ARG_SPILL)
+
+// return address                           <-- entry ESP
+// saved ebp                                <-- EBP
+// saved ebx
+// saved esi
+// saved edi
+// UMEntryThunk*
+// Thread*
+// saved ecx
+// saved edx
+// {optional stack args passed to callee}   <-- new esp
+
+    PROLOG_BEG
+    PROLOG_PUSH ebx
+    PROLOG_PUSH esi
+    PROLOG_PUSH edi
+    PROLOG_END
+    sub esp, UMThunkStub_FIXEDALLOCSIZE
+
+    mov dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET], ecx
+    mov dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET - 0x04], edx
+
+    mov dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET], eax
+
+    call C_FUNC(GetThread)
+    test eax, eax
+    jz LOCAL_LABEL(DoThreadSetup)
+
+LOCAL_LABEL(HaveThread):
+
+    mov dword ptr [ebp - UMThunkStub_THREAD_OFFSET], eax
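+    // The Thread* cached above lets PostCall and the rare GC path below
+    // reload it after eax has been clobbered.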
+
+    // FailFast if a native callable method is invoked via ldftn and calli.
+    cmp dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1
+    jz LOCAL_LABEL(InvalidTransition)
+
+    // disable preemptive GC
+    mov dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1
+
+    // catch returning thread here if a GC is in progress
+    PREPARE_EXTERNAL_VAR g_TrapReturningThreads, eax // eax <- &g_TrapReturningThreads
+    cmp dword ptr [eax], 0
+    jnz LOCAL_LABEL(DoTrapReturningThreadsTHROW)
+
+LOCAL_LABEL(InCooperativeMode):
+
+#if _DEBUG
+    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
+    mov eax, dword ptr [eax + Thread__m_pDomain]
+    mov esi, dword ptr [eax + AppDomain__m_dwId]
+
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    mov edi, dword ptr [eax + UMEntryThunk__m_dwDomainId]
+
+    cmp esi, edi
+    jne LOCAL_LABEL(WrongAppDomain)
+#endif
+
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    mov ebx, dword ptr [eax + UMEntryThunk__m_pUMThunkMarshInfo]
+    mov eax, dword ptr [ebx + UMThunkMarshInfo__m_cbActualArgSize]
+    test eax, eax
+    jnz LOCAL_LABEL(UMThunkStub_CopyStackArgs)
+
+LOCAL_LABEL(UMThunkStub_ArgumentsSetup):
+
+    mov ecx, dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET]
+    mov edx, dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET - 0x04]
+
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    mov ebx, dword ptr [eax + UMEntryThunk__m_pUMThunkMarshInfo]
+    mov ebx, dword ptr [ebx + UMThunkMarshInfo__m_pILStub]
+
+    call ebx
+
+LOCAL_LABEL(PostCall):
+
+    mov ebx, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
+    mov dword ptr [ebx + Thread_m_fPreemptiveGCDisabled], 0
+
+    lea esp, [ebp - UMThunkStub_SAVEDREG] // deallocate locals and copied stack args
+    EPILOG_BEG
+    EPILOG_POP edi
+    EPILOG_POP esi
+    EPILOG_POP ebx
+    EPILOG_END
+    ret
+
+LOCAL_LABEL(DoThreadSetup):
+
+    call C_FUNC(CreateThreadBlockThrow)
+    jmp LOCAL_LABEL(HaveThread)
+
+LOCAL_LABEL(InvalidTransition):
+
+    // No arguments to set up; ReversePInvokeBadTransition will FailFast.
+    call C_FUNC(ReversePInvokeBadTransition)
+
+LOCAL_LABEL(DoTrapReturningThreadsTHROW):
+
+    // extern "C" VOID STDCALL UMThunkStubRareDisableWorker(Thread *pThread, UMEntryThunk *pUMEntryThunk)
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    push eax
+    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
+    push eax
+    call C_FUNC(UMThunkStubRareDisableWorker)
+
+    jmp LOCAL_LABEL(InCooperativeMode)
+
+LOCAL_LABEL(UMThunkStub_CopyStackArgs):
+
+    // eax = m_cbActualArgSize
+    sub esp, eax
+    and esp, -16 // align esp to 16 bytes
+    lea esi, [ebp + 0x08] // src = first caller-pushed stack arg
+    lea edi, [esp]        // dest = outgoing argument area
+
+LOCAL_LABEL(CopyLoop):
+
+    // eax = remaining byte count (multiple of 4)
+    // esi = src
+    // edi = dest
+    // edx = scratch
+
+    add eax, -4
+    mov edx, dword ptr [esi + eax]
+    mov dword ptr [edi + eax], edx
+    jnz LOCAL_LABEL(CopyLoop) // the movs leave ZF from the add intact
+
+    jmp LOCAL_LABEL(UMThunkStub_ArgumentsSetup)
+
+#if _DEBUG
+LOCAL_LABEL(WrongAppDomain):
+    int3
+#endif
+
 NESTED_END UMThunkStub, _TEXT
diff --git a/src/vm/stackwalk.cpp b/src/vm/stackwalk.cpp
index dbc83f4a0156..18a890003952 100644
--- a/src/vm/stackwalk.cpp
+++ b/src/vm/stackwalk.cpp
@@ -2677,7 +2677,7 @@ StackWalkAction StackFrameIterator::NextRaw(void)
 
         // We are transitioning from unmanaged code to managed code... lets do some validation of our
         // EH mechanism on platforms that we can.
-#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && defined(_TARGET_X86_)
+#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && (defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL))
         VerifyValidTransitionFromManagedCode(m_crawl.pThread, &m_crawl);
 #endif // _DEBUG && !DACCESS_COMPILE && _TARGET_X86_
     }
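
Aside: as a cross-check of the CopyLoop above, here is a minimal C sketch of the same copy. It is an illustration only, not CoreCLR code; copy_stack_args, dst, src, and cb are invented names. Like the 4-byte stride in the assembly, it assumes the argument size is a nonzero multiple of 4 (the jnz guard before UMThunkStub_CopyStackArgs already filters out the zero case).

    #include <stdint.h>

    /* Mirror of CopyLoop: copy cb bytes of stack arguments in 4-byte units,
       walking from the highest dword down to offset 0. */
    static void copy_stack_args(uint32_t *dst, const uint32_t *src, uint32_t cb)
    {
        do
        {
            cb -= 4;                   /* add eax, -4 (sets ZF when cb reaches 0) */
            dst[cb / 4] = src[cb / 4]; /* mov edx, [esi+eax] / mov [edi+eax], edx */
        } while (cb != 0);             /* jnz CopyLoop */
    }

Copying from the top down lets a single decremented register serve as both loop counter and byte offset, which is exactly the trick the assembly uses.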